Compare commits


No commits in common. "node-letsencrypt-python" and "master" have entirely different histories.

57 changed files with 6534 additions and 1431 deletions

.gitignore (73 changed lines)

@@ -1,15 +1,26 @@
-letsencrypt.work
-letsencrypt.logs
-letsencrypt.config
+greenlock.json*
+TODO*
+link.sh
+.env
+.greenlockrc
+
+# generated by init
+app.js
+server.js
+example.js
+
+# ---> Node
 # Logs
 logs
 *.log
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*

 # Runtime data
 pids
 *.pid
 *.seed
+*.pid.lock

 # Directory for instrumented libs generated by jscoverage/JSCover
 lib-cov
@@ -17,15 +28,61 @@ lib-cov
 # Coverage directory used by tools like istanbul
 coverage

-# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
+# nyc test coverage
+.nyc_output
+
+# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
 .grunt

+# Bower dependency directory (https://bower.io/)
+bower_components
+
 # node-waf configuration
 .lock-wscript

-# Compiled binary addons (http://nodejs.org/api/addons.html)
+# Compiled binary addons (https://nodejs.org/api/addons.html)
 build/Release

-# Dependency directory
-# https://www.npmjs.org/doc/misc/npm-faq.html#should-i-check-my-node_modules-folder-into-git
-node_modules
+# Dependency directories
+node_modules/
+jspm_packages/
+
+# TypeScript v1 declaration files
+typings/
+
+# Optional npm cache directory
+.npm
+
+# Optional eslint cache
+.eslintcache
+
+# Optional REPL history
+.node_repl_history
+
+# Output of 'npm pack'
+*.tgz
+
+# Yarn Integrity file
+.yarn-integrity
+
+# dotenv environment variables file
+.env
+
+# parcel-bundler cache (https://parceljs.org/)
+.cache
+
+# next.js build output
+.next
+
+# nuxt.js build output
+.nuxt
+
+# vuepress build output
+.vuepress/dist
+
+# Serverless directories
+.serverless
+
+# FuseBox cache
+.fusebox/

.prettierrc (new file, 8 lines)

@@ -0,0 +1,8 @@
{
"bracketSpacing": true,
"printWidth": 80,
"singleQuote": true,
"tabWidth": 4,
"trailingComma": "none",
"useTabs": false
}

LICENSE (460 changed lines)

@@ -1,230 +1,312 @@

Removed (the previous license text):

At your option you may choose either of the following licenses:

* The MIT License (MIT)
* The Apache License 2.0 (Apache-2.0)

The MIT License (MIT)

Copyright (c) 2015 AJ ONeal

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

Apache License, Version 2.0, January 2004, http://www.apache.org/licenses/: the TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION, Sections 1 through 9 (Definitions; Grant of Copyright License; Grant of Patent License; Redistribution; Submission of Contributions; Trademarks; Disclaimer of Warranty; Limitation of Liability; Accepting Warranty or Additional Liability), END OF TERMS AND CONDITIONS, and the Appendix ("How to apply the Apache License to your work") with the following notice:

Copyright 2015 AJ ONeal

Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.

Added (the new license text):

Mozilla Public License Version 2.0

1. Definitions

1.1. "Contributor" means each individual or legal entity that creates, contributes to the creation of, or owns Covered Software.

1.2. "Contributor Version" means the combination of the Contributions of others (if any) used by a Contributor and that particular Contributor's Contribution.

1.3. "Contribution" means Covered Software of a particular Contributor.

1.4. "Covered Software" means Source Code Form to which the initial Contributor has attached the notice in Exhibit A, the Executable Form of such Source Code Form, and Modifications of such Source Code Form, in each case including portions thereof.

1.5. "Incompatible With Secondary Licenses" means

(a) that the initial Contributor has attached the notice described in Exhibit B to the Covered Software; or

(b) that the Covered Software was made available under the terms of version 1.1 or earlier of the License, but not also under the terms of a Secondary License.

1.6. "Executable Form" means any form of the work other than Source Code Form.

1.7. "Larger Work" means a work that combines Covered Software with other material, in a separate file or files, that is not Covered Software.

1.8. "License" means this document.

1.9. "Licensable" means having the right to grant, to the maximum extent possible, whether at the time of the initial grant or subsequently, any and all of the rights conveyed by this License.

1.10. "Modifications" means any of the following:

(a) any file in Source Code Form that results from an addition to, deletion from, or modification of the contents of Covered Software; or

(b) any new file in Source Code Form that contains any Covered Software.

1.11. "Patent Claims" of a Contributor means any patent claim(s), including without limitation, method, process, and apparatus claims, in any patent Licensable by such Contributor that would be infringed, but for the grant of the License, by the making, using, selling, offering for sale, having made, import, or transfer of either its Contributions or its Contributor Version.

1.12. "Secondary License" means either the GNU General Public License, Version 2.0, the GNU Lesser General Public License, Version 2.1, the GNU Affero General Public License, Version 3.0, or any later versions of those licenses.

1.13. "Source Code Form" means the form of the work preferred for making modifications.

1.14. "You" (or "Your") means an individual or a legal entity exercising rights under this License. For legal entities, "You" includes any entity that controls, is controlled by, or is under common control with You. For purposes of this definition, "control" means (a) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (b) ownership of more than fifty percent (50%) of the outstanding shares or beneficial ownership of such entity.

2. License Grants and Conditions

2.1. Grants

Each Contributor hereby grants You a world-wide, royalty-free, non-exclusive license:

(a) under intellectual property rights (other than patent or trademark) Licensable by such Contributor to use, reproduce, make available, modify, display, perform, distribute, and otherwise exploit its Contributions, either on an unmodified basis, with Modifications, or as part of a Larger Work; and

(b) under Patent Claims of such Contributor to make, use, sell, offer for sale, have made, import, and otherwise transfer either its Contributions or its Contributor Version.

2.2. Effective Date

The licenses granted in Section 2.1 with respect to any Contribution become effective for each Contribution on the date the Contributor first distributes such Contribution.

2.3. Limitations on Grant Scope

The licenses granted in this Section 2 are the only rights granted under this License. No additional rights or licenses will be implied from the distribution or licensing of Covered Software under this License. Notwithstanding Section 2.1(b) above, no patent license is granted by a Contributor:

(a) for any code that a Contributor has removed from Covered Software; or

(b) for infringements caused by: (i) Your and any other third party's modifications of Covered Software, or (ii) the combination of its Contributions with other software (except as part of its Contributor Version); or

(c) under Patent Claims infringed by Covered Software in the absence of its Contributions.

This License does not grant any rights in the trademarks, service marks, or logos of any Contributor (except as may be necessary to comply with the notice requirements in Section 3.4).

2.4. Subsequent Licenses

No Contributor makes additional grants as a result of Your choice to distribute the Covered Software under a subsequent version of this License (see Section 10.2) or under the terms of a Secondary License (if permitted under the terms of Section 3.3).

2.5. Representation

Each Contributor represents that the Contributor believes its Contributions are its original creation(s) or it has sufficient rights to grant the rights to its Contributions conveyed by this License.

2.6. Fair Use

This License is not intended to limit any rights You have under applicable copyright doctrines of fair use, fair dealing, or other equivalents.

2.7. Conditions

Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in Section 2.1.

3. Responsibilities

3.1. Distribution of Source Form

All distribution of Covered Software in Source Code Form, including any Modifications
that You create or to which You contribute, must be under the terms of this
License. You must inform recipients that the Source Code Form of the Covered
Software is governed by the terms of this License, and how they can obtain
a copy of this License. You may not attempt to alter or restrict the recipients'
rights in the Source Code Form.
3.2. Distribution of Executable Form
If You distribute Covered Software in Executable Form then:
(a) such Covered Software must also be made available in Source Code Form,
as described in Section 3.1, and You must inform recipients of the Executable
Form how they can obtain a copy of such Source Code Form by reasonable means
in a timely manner, at a charge no more than the cost of distribution to the
recipient; and
(b) You may distribute such Executable Form under the terms of this License,
or sublicense it under different terms, provided that the license for the
Executable Form does not attempt to limit or alter the recipients' rights
in the Source Code Form under this License.
3.3. Distribution of a Larger Work
You may create and distribute a Larger Work under terms of Your choice, provided
that You also comply with the requirements of this License for the Covered
Software. If the Larger Work is a combination of Covered Software with a work
governed by one or more Secondary Licenses, and the Covered Software is not
Incompatible With Secondary Licenses, this License permits You to additionally
distribute such Covered Software under the terms of such Secondary License(s),
so that the recipient of the Larger Work may, at their option, further distribute
the Covered Software under the terms of either this License or such Secondary
License(s).
3.4. Notices
You may not remove or alter the substance of any license notices (including
copyright notices, patent notices, disclaimers of warranty, or limitations
of liability) contained within the Source Code Form of the Covered Software,
except that You may alter any license notices to the extent required to remedy
known factual inaccuracies.
3.5. Application of Additional Terms
You may choose to offer, and to charge a fee for, warranty, support, indemnity
or liability obligations to one or more recipients of Covered Software. However,
You may do so only on Your own behalf, and not on behalf of any Contributor.
You must make it absolutely clear that any such warranty, support, indemnity,
or liability obligation is offered by You alone, and You hereby agree to indemnify
every Contributor for any liability incurred by such Contributor as a result
of warranty, support, indemnity or liability terms You offer. You may include
additional disclaimers of warranty and limitations of liability specific to
any jurisdiction.
4. Inability to Comply Due to Statute or Regulation
If it is impossible for You to comply with any of the terms of this License
with respect to some or all of the Covered Software due to statute, judicial
order, or regulation then You must: (a) comply with the terms of this License
to the maximum extent possible; and (b) describe the limitations and the code
they affect. Such description must be placed in a text file included with
all distributions of the Covered Software under this License. Except to the
extent prohibited by statute or regulation, such description must be sufficiently
detailed for a recipient of ordinary skill to be able to understand it.
5. Termination
5.1. The rights granted under this License will terminate automatically if
You fail to comply with any of its terms. However, if You become compliant,
then the rights granted under this License from a particular Contributor are
reinstated (a) provisionally, unless and until such Contributor explicitly
and finally terminates Your grants, and (b) on an ongoing basis, if such Contributor
fails to notify You of the non-compliance by some reasonable means prior to
60 days after You have come back into compliance. Moreover, Your grants from
a particular Contributor are reinstated on an ongoing basis if such Contributor
notifies You of the non-compliance by some reasonable means, this is the first
time You have received notice of non-compliance with this License from such
Contributor, and You become compliant prior to 30 days after Your receipt
of the notice.
5.2. If You initiate litigation against any entity by asserting a patent infringement
claim (excluding declaratory judgment actions, counter-claims, and cross-claims)
alleging that a Contributor Version directly or indirectly infringes any patent,
then the rights granted to You by any and all Contributors for the Covered
Software under Section 2.1 of this License shall terminate.
5.3. In the event of termination under Sections 5.1 or 5.2 above, all end
user license agreements (excluding distributors and resellers) which have
been validly granted by You or Your distributors under this License prior
to termination shall survive termination.
6. Disclaimer of Warranty
Covered Software is provided under this License on an "as is" basis, without
warranty of any kind, either expressed, implied, or statutory, including,
without limitation, warranties that the Covered Software is free of defects,
merchantable, fit for a particular purpose or non-infringing. The entire risk
as to the quality and performance of the Covered Software is with You. Should
any Covered Software prove defective in any respect, You (not any Contributor)
assume the cost of any necessary servicing, repair, or correction. This disclaimer
of warranty constitutes an essential part of this License. No use of any Covered
Software is authorized under this License except under this disclaimer.
7. Limitation of Liability
Under no circumstances and under no legal theory, whether tort (including
negligence), contract, or otherwise, shall any Contributor, or anyone who
distributes Covered Software as permitted above, be liable to You for any
direct, indirect, special, incidental, or consequential damages of any character
including, without limitation, damages for lost profits, loss of goodwill,
work stoppage, computer failure or malfunction, or any and all other commercial
damages or losses, even if such party shall have been informed of the possibility
of such damages. This limitation of liability shall not apply to liability
for death or personal injury resulting from such party's negligence to the
extent applicable law prohibits such limitation. Some jurisdictions do not
allow the exclusion or limitation of incidental or consequential damages,
so this exclusion and limitation may not apply to You.
8. Litigation
Any litigation relating to this License may be brought only in the courts
of a jurisdiction where the defendant maintains its principal place of business
and such litigation shall be governed by laws of that jurisdiction, without
reference to its conflict-of-law provisions. Nothing in this Section shall
prevent a party's ability to bring cross-claims or counter-claims.
9. Miscellaneous
This License represents the complete agreement concerning the subject matter
hereof. If any provision of this License is held to be unenforceable, such
provision shall be reformed only to the extent necessary to make it enforceable.
Any law or regulation which provides that the language of a contract shall
be construed against the drafter shall not be used to construe this License
against a Contributor.
10. Versions of the License
10.1. New Versions
Mozilla Foundation is the license steward. Except as provided in Section 10.3,
no one other than the license steward has the right to modify or publish new
versions of this License. Each version will be given a distinguishing version
number.
10.2. Effect of New Versions
You may distribute the Covered Software under the terms of the version of
the License under which You originally received the Covered Software, or under
the terms of any subsequent version published by the license steward.
10.3. Modified Versions
If you create software not governed by this License, and you want to create
a new license for such software, you may create and use a modified version
of this License if you rename the license and remove any references to the
name of the license steward (except to note that such modified license differs
from this License).
10.4. Distributing Source Code Form that is Incompatible With Secondary Licenses
If You choose to distribute Source Code Form that is Incompatible With Secondary
Licenses under the terms of this version of the License, the notice described
in Exhibit B of this License must be attached.

Exhibit A - Source Code Form License Notice
This Source Code Form is subject to the terms of the Mozilla Public License,
v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain
one at http://mozilla.org/MPL/2.0/.
If it is not possible or desirable to put the notice in a particular file,
then You may include the notice in a location (such as a LICENSE file in a
relevant directory) where a recipient would be likely to look for such a notice.
You may add additional accurate notices of copyright ownership.
Exhibit B - "Incompatible With Secondary Licenses" Notice
This Source Code Form is "Incompatible With Secondary Licenses", as defined
by the Mozilla Public License, v. 2.0.

MIGRATION_GUIDE.md (new file, 403 lines)

@@ -0,0 +1,403 @@
# Migration Guide
Greenlock v4 is the current version.
# v3 to v4
v4 is a very minor, but breaking, change from v3
### `configFile` is replaced with `configDir`
The default config file `./greenlock.json` is now `./greenlock.d/config.json`.
This change was made to eliminate unnecessary configuration that was inadvertently introduced in v3.
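For illustration, a minimal sketch of the rename as it appears in a `Greenlock.create()` call (the other required options, such as `packageAgent` and `maintainerEmail`, are shown later in this guide):
```js
'use strict';

var Greenlock = require('greenlock');

// v3 (old): options pointed at a single JSON config file
// var greenlock = Greenlock.create({ configFile: './greenlock.json' });

// v4 (new): options point at a config directory that holds config.json
var greenlock = Greenlock.create({
    configDir: './greenlock.d'
    // plus packageAgent, maintainerEmail, notify, etc.
});
```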
### `.greenlockrc` is auto-generated
`.greenlockrc` exists for the sake of tooling - so that the CLI, Web API, and your code naturally stay in sync.
It looks like this:
```json
{
"manager": {
"module": "@greenlock/manager"
},
"configDir": "./greenlock.d"
}
```
If you deploy to a read-only filesystem, it is best that you create the `.greenlockrc` file as part
of your image and use that rather than including any configuration in your code.
# v2 to v4
**Greenlock Express** uses Greenlock directly, the same as before.
All options described for `Greenlock.create({...})` also apply to the Greenlock Express `init()` callback.
# Overview of Major Differences
- Reduced API
- No code in the config
    - (config is completely serializable)
- Manager callbacks replace `approveDomains`
- Greenlock Express does more, with less config
    - cluster is supported out-of-the-box
    - high-performance
    - scalable
- ACME challenges are simplified
    - init
    - zones (dns-01)
    - set
    - get
    - remove
- Store callbacks are simplified
    - accounts
        - checkKeypairs
    - certificates
        - checkKeypairs
        - check
        - set
# Greenlock JavaScript API greatly reduced
Whereas before there were many different methods with nuanced differences,
now there's just `create`, `get`, `renew`, and sometimes `add`.
- Greenlock.create({ maintainerEmail, packageAgent, notify })
- Greenlock.get({ servername, wildname, duplicate, force })
    - (just a convenience wrapper around renew)
- Greenlock.renew({ subject, altnames, issuedBefore, expiresAfter })
    - (retrieves, issues, renews, all-in-one)
- _optional_ Greenlock.add({ subject, altnames, subscriberEmail })
    - (partially replaces `approveDomains`)
Also, some disambiguation on terms:
- `domains` was often ambiguous and confusing; it has been replaced by the following (see the short example after this list):
    - `subject` refers to the subject of a certificate - the primary domain
    - `altnames` refers to the domains in the SAN (Subject Alternative Names) section of the certificate
    - `servername` refers to the TLS (SSL) SNI (Server Name Indication) request for a certificate
    - `wildname` refers to the wildcard version of the servername (ex: `www.example.com => *.example.com`)
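As a concrete illustration of those terms (the domain names below are placeholders):
```js
// A certificate whose subject is 'example.com' and which also covers 'www.example.com':
var site = {
    subject: 'example.com', // the primary domain on the certificate
    altnames: ['example.com', 'www.example.com'] // the full SAN list
};

// A browser connects with SNI 'www.example.com'...
var servername = 'www.example.com';
// ...and its wildcard variant is used as a fallback lookup:
var wildname = '*.example.com';
```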
When you create an instance of Greenlock, you only supply package and maintainer info.
All other configuration is A) optional and B) handled by the _Manager_.
```js
'use strict';
var pkg = require('./package.json');
var Greenlock = require('greenlock');
var greenlock = Greenlock.create({
// used for the ACME client User-Agent string as per RFC 8555 and RFC 7231
packageAgent: pkg.name + '/' + pkg.version,
// used as the contact for critical bug and security notices
// should be the same as pkg.author.email
maintainerEmail: 'jon@example.com',
// used for logging background events and errors
notify: function(ev, args) {
if ('error' === ev || 'warning' === ev) {
console.error(ev, args);
return;
}
console.info(ev, args);
}
});
```
By default **no certificates will be issued**. See the _manager_ section.
When you want to get a single certificate, you use `get`, which will:
- return null if neither the `servername` nor its `wildname` (wildcard) variant can be found
- retrieve a non-expired certificate, if possible
- renew the certificate in the background, if stale
- wait for the certificate to be issued, if new
```js
greenlock
.get({ servername: 'www.example.com' })
.then(function(result) {
if (!result) {
// certificate is not on the approved list
return null;
}
var fullchain = result.pems.cert + '\n' + result.pems.chain + '\n';
var privkey = result.pems.privkey;
return {
fullchain: fullchain,
privkey: privkey
};
})
.catch(function(e) {
// something went wrong in the renew process
console.error(e);
});
```
By default **no certificates will be issued**. See the _manager_ section.
When you want to renew certificates, _en masse_, you use `renew`, which will:
- check all certificates matching the given criteria
- only renew stale certificates by default
- return error objects (will NOT throw an exception for failed renewals)
```js
greenlock
.renew({})
.then(function(results) {
if (!results.length) {
// no certificates found
return null;
}
// [{ site, error }]
return results;
})
.catch(function(e) {
// an unexpected error, not related to renewal
console.error(e);
});
```
Options:
| Option | Description |
| ------------- | -------------------------------------------------------------------------- |
| `altnames` | only check and renew certs matching these altnames (including wildcards) |
| `renewBefore` | only check and renew certs marked for renewal before the given date, in ms |
| `duplicate` | renew certificates regardless of timing |
| `force` | allow silly things, like tiny `renewOffset`s |
By default **no certificates will be issued**. See the _manager_ section.
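For example, a `renew` call that combines options from the table above, using the `greenlock` instance from the earlier `create` example (the altname is a placeholder):
```js
// Re-issue the certs matching 'example.com' even if they are not yet stale.
greenlock
    .renew({
        altnames: ['example.com'],
        duplicate: true
    })
    .then(function(results) {
        // failed renewals come back as { site, error } entries rather than thrown exceptions
        results.forEach(function(result) {
            if (result.error) {
                console.error(result.error);
            }
        });
    });
```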
# Greenlock Express Example
The options that must be returned from `init()` are the same as those used in `Greenlock.create()`,
with a few extra that are specific to Greenlock Express:
```js
require('@root/greenlock-express')
.init(function() {
// This object will be passed to Greenlock.create()
var options = {
// some options, like cluster, are special to Greenlock Express
cluster: false,
// The rest are the same as for Greenlock
packageAgent: pkg.name + '/' + pkg.version,
maintainerEmail: 'jon@example.com',
notify: function(ev, args) {
console.info(ev, args);
}
};
return options;
})
.serve(function(glx) {
// will start servers on port 80 and 443
glx.serveApp(function(req, res) {
res.end('Hello, Encrypted World!');
});
// you can get access to the raw server (i.e. for websockets)
glx.httpsServer(); // returns raw server object
});
```
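For instance, the raw server handle is what you would use to attach a WebSocket server. A minimal sketch using the `ws` package (the `ws` dependency is an assumption of this sketch, not something the guide prescribes):
```js
'use strict';

var pkg = require('./package.json');
var WebSocket = require('ws'); // assumed dependency

require('@root/greenlock-express')
    .init(function() {
        // same options as in the example above
        return {
            cluster: false,
            packageAgent: pkg.name + '/' + pkg.version,
            maintainerEmail: 'jon@example.com'
        };
    })
    .serve(function(glx) {
        glx.serveApp(function(req, res) {
            res.end('Hello, Encrypted World!');
        });

        // attach a WebSocket server to the raw https server
        var wss = new WebSocket.Server({ server: glx.httpsServer() });
        wss.on('connection', function(ws) {
            ws.send('Hello, Encrypted World!');
        });
    });
```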
# _Manager_ replaces `approveDomains`
`approveDomains` was always a little confusing. Most people didn't need it.
Instead, now there is a simple config file that will work for most people,
as well as a set of callbacks for easy configurability.
### Default Manager
The default manager is `@greenlock/manager` and the default `configDir` is `./greenlock.d`.
The config file should look something like this:
`./greenlock.d/config.json`:
```json
{
"subscriberEmail": "jon@example.com",
"agreeToTerms": true,
"sites": {
"example.com": {
"subject": "example.com",
"altnames": ["example.com", "www.example.com"]
}
}
}
```
You can specify an `acme-dns-01-*` or `acme-http-01-*` challenge plugin globally, or per-site.
```json
{
"subscriberEmail": "jon@example.com",
"agreeToTerms": true,
"sites": {
"example.com": {
"subject": "example.com",
"altnames": ["example.com", "www.example.com"],
"challenges": {
"dns-01": {
"module": "acme-dns-01-digitalocean",
"token": "apikey-xxxxx"
}
}
}
}
}
```
The same is true with `greenlock-store-*` plugins:
```json
{
"subscriberEmail": "jon@example.com",
"agreeToTerms": true,
"sites": {
"example.com": {
"subject": "example.com",
"altnames": ["example.com", "www.example.com"]
}
},
"store": {
"module": "greenlock-store-fs",
"basePath": "~/.config/greenlock"
}
}
```
### Custom Manager, the lazy way
At the very least you have to implement `get({ servername, wildname })`.
```js
var greenlock = Greenlock.create({
packageAgent: pkg.name + '/' + pkg.version,
maintainerEmail: 'jon@example.com',
notify: notify,
packageRoot: __dirname,
manager: {
module: './manager.js'
}
});
function notify(ev, args) {
if ('error' === ev || 'warning' === ev) {
console.error(ev, args);
return;
}
console.info(ev, args);
}
```
In the simplest case you can ignore all incoming options
and return a single site config in the same format as the config file.
`./manager.js`:
```js
'use strict';
module.exports.create = function() {
return {
get: async function({ servername }) {
// do something to fetch the site
var site = {
subject: 'example.com',
altnames: ['example.com', 'www.example.com']
};
return site;
}
};
};
```
If you want to use wildcards or local domains for a specific domain, you must specify the `dns-01` challenge plugin to use:
```js
'use strict';
module.exports.create = function() {
return {
get: async function({ servername }) {
// do something to fetch the site
var site = {
subject: 'example.com',
altnames: ['example.com', 'www.example.com'],
// dns-01 challenge
challenges: {
'dns-01': {
module: 'acme-dns-01-namedotcom',
apikey: 'xxxx'
}
}
};
return site;
}
};
};
```
### Custom Manager, Complete
See <https://git.rootprojects.org/root/greenlock-manager-test.js#quick-start>
# ACME Challenge Plugins
The ACME challenge plugins are just a few simple callbacks (a minimal example follows the documentation links below):
- `init`
- `zones` (dns-01 only)
- `set`
- `get`
- `remove`
They are described here:
- [dns-01 documentation](https://git.rootprojects.org/root/acme-dns-01-test.js)
- [http-01 documentation](https://git.rootprojects.org/root/acme-http-01-test.js)
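For illustration, a minimal in-memory http-01 plugin sketch; the argument shapes noted in the comments follow the acme-http-01-test documentation linked above and should be treated as an assumption, not as normative API:
```js
'use strict';

// A toy plugin that keeps pending challenges in memory (not suitable for clustered servers).
module.exports.create = function(config) {
    var memory = {};

    return {
        init: function(deps) {
            // one-time setup; nothing to do for an in-memory store
            return Promise.resolve(null);
        },
        set: function(data) {
            var ch = data.challenge; // assumed shape: { identifier, token, keyAuthorization, ... }
            memory[ch.identifier.value + '#' + ch.token] = ch.keyAuthorization;
            return Promise.resolve(null);
        },
        get: function(data) {
            var ch = data.challenge;
            var secret = memory[ch.identifier.value + '#' + ch.token];
            // only return something if it exists
            return Promise.resolve(secret ? { keyAuthorization: secret } : null);
        },
        remove: function(data) {
            var ch = data.challenge;
            delete memory[ch.identifier.value + '#' + ch.token];
            return Promise.resolve(null);
        }
    };
};
```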
# Key and Cert Store Plugins
Again, these are just a few simple callbacks:
- `certificates.checkKeypair`
- `certificates.check`
- `certificates.setKeypair`
- `certificates.set`
- `accounts.checkKeypair`
- `accounts.check` (optional)
- `accounts.setKeypair`
- `accounts.set` (optional)
The name `check` is used instead of `get` because they only need to return something if it exists. They do not need to fail, nor do they need to generate anything.
They are described here:
- [greenlock store documentation](https://git.rootprojects.org/root/greenlock-store-test.js)
If you are just implementing in-house and are not going to publish a module, you can also do some hacky things like this:
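(The original snippet is not included in this diff hunk; the following is an illustrative in-memory sketch built from the callback names listed above, with simplified, assumed argument shapes.)
```js
'use strict';

// Quick-and-dirty in-memory store: fine for local experiments, not for production.
module.exports.create = function(opts) {
    var keypairs = {};
    var certs = {};

    return {
        accounts: {
            checkKeypair: function(query) {
                // only return something if it exists; never fail, never generate
                return Promise.resolve(keypairs[query.email] || null);
            },
            setKeypair: function(query) {
                keypairs[query.email] = query.keypair; // assumed property names
                return Promise.resolve(null);
            }
        },
        certificates: {
            checkKeypair: function(query) {
                return Promise.resolve(keypairs[query.subject] || null);
            },
            setKeypair: function(query) {
                keypairs[query.subject] = query.keypair;
                return Promise.resolve(null);
            },
            check: function(query) {
                return Promise.resolve(certs[query.subject] || null);
            },
            set: function(query) {
                certs[query.subject] = query.pems; // assumed property names
                return Promise.resolve(null);
            }
        }
    };
};
```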

README.md (1119 changed lines)

File diff suppressed because it is too large.

accounts.js (new file, 219 lines)

@@ -0,0 +1,219 @@
'use strict';
var A = module.exports;
var U = require('./utils.js');
var E = require('./errors.js');
var pending = {};
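// in-flight account lookups/creations keyed by subscriber email, so concurrent calls share a single ACME registration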
A._getOrCreate = function(gnlck, mconf, db, acme, args) {
var email = args.subscriberEmail || mconf.subscriberEmail;
if (!email) {
throw E.NO_SUBSCRIBER('get account', args.subject);
}
// TODO send welcome message with benefit info
return U._validMx(email)
.catch(function() {
throw E.NO_SUBSCRIBER('get account', args.subscriberEmail);
})
.then(function() {
if (pending[email]) {
return pending[email];
}
pending[email] = A._rawGetOrCreate(
gnlck,
mconf,
db,
acme,
args,
email
)
.catch(function(e) {
delete pending[email];
throw e;
})
.then(function(result) {
delete pending[email];
return result;
});
return pending[email];
});
};
// What we really need out of this is the private key and the ACME "key" id
A._rawGetOrCreate = function(gnlck, mconf, db, acme, args, email) {
var p;
if (db.check) {
p = A._checkStore(gnlck, mconf, db, acme, args, email);
} else {
p = Promise.resolve(null);
}
return p.then(function(fullAccount) {
if (!fullAccount) {
return A._newAccount(gnlck, mconf, db, acme, args, email, null);
}
if (fullAccount.keypair && fullAccount.key && fullAccount.key.kid) {
return fullAccount;
}
return A._newAccount(gnlck, mconf, db, acme, args, email, fullAccount);
});
};
A._newAccount = function(gnlck, mconf, db, acme, args, email, fullAccount) {
var keyType = args.accountKeyType || mconf.accountKeyType;
var query = {
subject: args.subject,
email: email,
subscriberEmail: email,
customerEmail: args.customerEmail,
account: fullAccount || {},
directoryUrl:
args.directoryUrl ||
mconf.directoryUrl ||
gnlck._defaults.directoryUrl
};
return U._getOrCreateKeypair(db, args.subject, query, keyType).then(
function(kresult) {
var keypair = kresult.keypair;
var accReg = {
subscriberEmail: email,
agreeToTerms:
args.agreeToTerms ||
mconf.agreeToTerms ||
gnlck._defaults.agreeToTerms,
accountKey: keypair.privateKeyJwk || keypair.private,
debug: args.debug
};
return acme.accounts.create(accReg).then(function(receipt) {
var reg = {
keypair: keypair,
receipt: receipt,
// shudder... not actually a KeyID... but so it is called anyway...
kid:
receipt &&
receipt.key &&
(receipt.key.kid || receipt.kid),
email: args.email,
subscriberEmail: email,
customerEmail: args.customerEmail
};
var keyP;
if (kresult.exists) {
keyP = Promise.resolve();
} else {
query.keypair = keypair;
query.receipt = receipt;
/*
query.server = gnlck._defaults.directoryUrl.replace(
/^https?:\/\//i,
''
);
*/
keyP = db.setKeypair(query, keypair);
}
return keyP
.then(function() {
if (!db.set) {
return Promise.resolve({
keypair: keypair
});
}
return db.set(
{
// id to be set by Store
email: email,
subscriberEmail: email,
customerEmail: args.customerEmail,
agreeTos: true,
agreeToTerms: true,
directoryUrl:
args.directoryUrl ||
mconf.directoryUrl ||
gnlck._defaults.directoryUrl
/*
server: gnlck._defaults.directoryUrl.replace(
/^https?:\/\//i,
''
)
*/
},
reg
);
})
.then(function(fullAccount) {
if (fullAccount && 'object' !== typeof fullAccount) {
throw new Error(
"accounts.set should either return 'null' or an object with an 'id' string"
);
}
if (!fullAccount) {
fullAccount = {};
}
fullAccount.keypair = keypair;
if (!fullAccount.key) {
fullAccount.key = {};
}
fullAccount.key.kid = reg.kid;
return fullAccount;
});
});
}
);
};
A._checkStore = function(gnlck, mconf, db, acme, args, email) {
if ((args.domain || args.domains) && !args.subject) {
console.warn("use 'subject' instead of 'domain'");
args.subject = args.domain;
}
var account = args.account;
if (!account) {
account = {};
}
if (args.accountKey) {
console.warn(
'rather than passing accountKey, put it directly into your account key store'
);
// TODO we probably don't need this
return U._importKeypair(args.accountKey);
}
if (!db.check) {
return Promise.resolve(null);
}
return db
.check({
//keypair: undefined,
//receipt: undefined,
email: email,
subscriberEmail: email,
customerEmail: args.customerEmail || mconf.customerEmail,
account: account,
directoryUrl:
args.directoryUrl ||
mconf.directoryUrl ||
gnlck._defaults.directoryUrl
})
.then(function(fullAccount) {
if (!fullAccount) {
return null;
}
return fullAccount;
});
};


@@ -1,41 +0,0 @@
'use strict';
var PromiseA = require('bluebird');
var fs = PromiseA.promisifyAll(require('fs'));
module.exports.create = function (leBinPath, defaults, opts) {
defaults.webroot = true;
defaults.renewByDefault = true;
defaults.text = true;
var LEP = require('letsencrypt-python');
var lep = PromiseA.promisifyAll(LEP.create(leBinPath, opts));
var wrapped = {
registerAsync: function (args) {
return lep.registerAsync('certonly', args);
}
, fetchAsync: function (args) {
var hostname = args.domains[0];
var crtpath = defaults.configDir + defaults.fullchainTpl.replace(/:hostname/, hostname);
var privpath = defaults.configDir + defaults.privkeyTpl.replace(/:hostname/, hostname);
return PromiseA.all([
fs.readFileAsync(privpath, 'ascii')
, fs.readFileAsync(crtpath, 'ascii')
// stat the file, not the link
, fs.statAsync(crtpath)
]).then(function (arr) {
return {
key: arr[0] // privkey.pem
, cert: arr[1] // fullchain.pem
// TODO parse certificate for lifetime / expiresAt
, issuedAt: arr[2].mtime.valueOf()
};
}, function () {
return null;
});
}
};
return wrapped;
};

bin/add.js (new file, 91 lines)

@@ -0,0 +1,91 @@
'use strict';
var args = process.argv.slice(3);
var cli = require('./lib/cli.js');
//var path = require('path');
//var pkgpath = path.join(__dirname, '..', 'package.json');
//var pkgpath = path.join(process.cwd(), 'package.json');
var Flags = require('./lib/flags.js');
Flags.init().then(function({ flagOptions, greenlock, mconf }) {
var myFlags = {};
[
'subject',
'altnames',
'renew-offset',
'subscriber-email',
'customer-email',
'server-key-type',
'challenge-http-01',
'challenge-http-01-xxxx',
'challenge-dns-01',
'challenge-dns-01-xxxx',
'challenge-tls-alpn-01',
'challenge-tls-alpn-01-xxxx',
'challenge',
'challenge-xxxx',
'challenge-json',
'force-save'
].forEach(function(k) {
myFlags[k] = flagOptions[k];
});
cli.parse(myFlags);
cli.main(function(argList, flags) {
Flags.mangleFlags(flags, mconf);
main(argList, flags, greenlock);
}, args);
});
async function main(_, flags, greenlock) {
if (!flags.subject || !flags.altnames) {
console.error(
'--subject and --altnames must be provided and should be valid domains'
);
process.exit(1);
return;
}
greenlock
.add(flags)
.catch(function(err) {
console.error();
console.error('error:', err.message);
console.error();
process.exit(1);
})
.then(function() {
return greenlock
._config({
servername:
flags.altnames[
Math.floor(Math.random() * flags.altnames.length)
]
})
.then(function(site) {
if (!site) {
console.info();
console.info(
'Internal bug or configuration mismatch: No config found.'
);
console.info();
process.exit(1);
return;
}
console.info();
Object.keys(site).forEach(function(k) {
if ('defaults' === k) {
console.info(k + ':');
Object.keys(site.defaults).forEach(function(key) {
var value = JSON.stringify(site.defaults[key]);
console.info('\t' + key + ':' + value);
});
} else {
console.info(k + ': ' + JSON.stringify(site[k]));
}
});
});
});
}

bin/certonly.js (new executable file, 378 lines)

@@ -0,0 +1,378 @@
'use strict';
var mkdirp = require('@root/mkdirp');
var cli = require('./cli.js');
cli.parse({
'directory-url': [
false,
' ACME Directory Resource URL',
'string',
'https://acme-v02.api.letsencrypt.org/directory',
'server,acme-url'
],
email: [
false,
' Email used for registration and recovery contact. (default: null)',
'email'
],
'agree-tos': [
false,
" Agree to the Greenlock and Let's Encrypt Subscriber Agreements",
'boolean',
false
],
'community-member': [
false,
' Submit stats to and get updates from Greenlock',
'boolean',
false
],
domains: [
false,
' Domain names to apply. For multiple domains you can enter a comma separated list of domains as a parameter. (default: [])',
'string'
],
'renew-offset': [
false,
' Positive (time after issue) or negative (time before expiry) offset, such as 30d or -45d',
'string',
'45d'
],
'renew-within': [
false,
' (ignored) use renew-offset instead',
'ignore',
undefined
],
'cert-path': [
false,
' Path to where new cert.pem is saved',
'string',
':configDir/live/:hostname/cert.pem'
],
'fullchain-path': [
false,
' Path to where new fullchain.pem (cert + chain) is saved',
'string',
':configDir/live/:hostname/fullchain.pem'
],
'bundle-path': [
false,
' Path to where new bundle.pem (fullchain + privkey) is saved',
'string',
':configDir/live/:hostname/bundle.pem'
],
'chain-path': [
false,
' Path to where new chain.pem is saved',
'string',
':configDir/live/:hostname/chain.pem'
],
'privkey-path': [
false,
' Path to where privkey.pem is saved',
'string',
':configDir/live/:hostname/privkey.pem'
],
'config-dir': [
false,
' Configuration directory.',
'string',
'~/letsencrypt/etc/'
],
store: [
false,
' The name of the storage module to use',
'string',
'greenlock-store-fs'
],
'store-xxxx': [
false,
' An option for the chosen storage module, such as --store-apikey or --store-bucket',
'bag'
],
'store-json': [
false,
' A JSON string containing all options for the chosen store module (instead of --store-xxxx)',
'json',
'{}'
],
challenge: [
false,
' The name of the HTTP-01, DNS-01, or TLS-ALPN-01 challenge module to use',
'string',
'@greenlock/acme-http-01-fs'
],
'challenge-xxxx': [
false,
' An option for the chosen challenge module, such as --challenge-apikey or --challenge-bucket',
'bag'
],
'challenge-json': [
false,
' A JSON string containing all options for the chosen challenge module (instead of --challenge-xxxx)',
'json',
'{}'
],
'skip-dry-run': [
false,
' Use with caution (and test with the staging url first). Creates an Order on the ACME server without a self-test.',
'boolean'
],
'skip-challenge-tests': [
false,
' Use with caution (and with the staging url first). Presents challenges to the ACME server without first testing locally.',
'boolean'
],
'http-01-port': [
false,
' Required to be 80 for live servers. Do not use. For special test environments only.',
'int'
],
'dns-01': [false, ' Use DNS-01 challenge type', 'boolean', false],
standalone: [
false,
' Obtain certs using a "standalone" webserver.',
'boolean',
false
],
manual: [
false,
' Print the token and key to the screen and wait for you to hit enter, giving you time to copy it somewhere before continuing (uses acme-http-01-cli or acme-dns-01-cli)',
'boolean',
false
],
debug: [false, ' show traces and logs', 'boolean', false],
root: [
false,
' public_html / webroot path (may use the :hostname template such as /srv/www/:hostname)',
'string',
undefined,
'webroot-path'
],
//
// backwards compat
//
duplicate: [
false,
' Allow getting a certificate that duplicates an existing one/is an early renewal',
'boolean',
false
],
'rsa-key-size': [
false,
' (ignored) use server-key-type or account-key-type instead',
'ignore',
2048
],
'server-key-path': [
false,
' Path to privkey.pem to use for certificate (default: generate new)',
'string',
undefined,
'domain-key-path'
],
'server-key-type': [
false,
" One of 'RSA' (2048), 'RSA-3084', 'RSA-4096', 'ECDSA' (P-256), or 'P-384'. For best compatibility, security, and efficiency use the default (More bits != More security)",
'string',
'RSA'
],
'account-key-path': [
false,
' Path to privkey.pem to use for account (default: generate new)',
'string'
],
'account-key-type': [
false,
" One of 'ECDSA' (P-256), 'P-384', 'RSA', 'RSA-3084', or 'RSA-4096'. Stick with 'ECDSA' (P-256) unless you need 'RSA' (2048) for legacy compatibility. (More bits != More security)",
'string',
'P-256'
],
webroot: [false, ' (ignored) for certbot compatibility', 'ignore', false],
//, 'standalone-supported-challenges': [ false, " Supported challenges, order preferences are randomly chosen. (default: http-01,tls-alpn-01)", 'string', 'http-01']
'work-dir': [
false,
' for certbot compatibility (ignored)',
'string',
'~/letsencrypt/var/lib/'
],
'logs-dir': [
false,
' for certbot compatibility (ignored)',
'string',
'~/letsencrypt/var/log/'
],
'acme-version': [
false,
' (ignored) ACME is now RFC 8555 and prior drafts are no longer supported',
'ignore',
'rfc8555'
]
});
// ignore certonly and extraneous arguments
cli.main(function(_, options) {
console.info('');
[
'configDir',
'privkeyPath',
'certPath',
'chainPath',
'fullchainPath',
'bundlePath'
].forEach(function(k) {
if (options[k]) {
options.storeOpts[k] = options[k];
}
delete options[k];
});
if (options.workDir) {
options.challengeOpts.workDir = options.workDir;
delete options.workDir;
}
if (options.debug) {
console.debug(options);
}
var args = {};
var homedir = require('os').homedir();
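// expand a leading '~' to the home directory and camelCase the dashed flag names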
Object.keys(options).forEach(function(key) {
var val = options[key];
if ('string' === typeof val) {
val = val.replace(/^~/, homedir);
}
key = key.replace(/\-([a-z0-9A-Z])/g, function(c) {
return c[1].toUpperCase();
});
args[key] = val;
});
Object.keys(args).forEach(function(key) {
var val = args[key];
if ('string' === typeof val) {
val = val.replace(/(\:configDir)|(\:config)/, args.configDir);
}
args[key] = val;
});
if (args.domains) {
args.domains = args.domains.split(',');
}
if (
!(Array.isArray(args.domains) && args.domains.length) ||
!args.email ||
!args.agreeTos ||
(!args.server && !args.directoryUrl)
) {
console.error('\nUsage:\n\ngreenlock certonly --standalone \\');
console.error(
'\t--agree-tos --email user@example.com --domains example.com \\'
);
console.error('\t--config-dir ~/acme/etc \\');
console.error('\nSee greenlock --help for more details\n');
return;
}
if (args.http01Port) {
// [@agnat]: Coerce to string. cli returns a number although we request a string.
args.http01Port = '' + args.http01Port;
args.http01Port = args.http01Port.split(',').map(function(port) {
return parseInt(port, 10);
});
}
function run() {
var challenges = {};
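// infer the ACME challenge type (http-01, dns-01, or tls-alpn-01) from the chosen module's name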
if (/http.?01/i.test(args.challenge)) {
challenges['http-01'] = args.challengeOpts;
}
if (/dns.?01/i.test(args.challenge)) {
challenges['dns-01'] = args.challengeOpts;
}
if (/alpn.?01/i.test(args.challenge)) {
challenges['tls-alpn-01'] = args.challengeOpts;
}
if (!Object.keys(challenges).length) {
throw new Error(
"Could not determine the challenge type for '" +
args.challengeOpts.module +
"'. Expected a name like @you/acme-xxxx-01-foo. Please name the module with http-01, dns-01, or tls-alpn-01."
);
}
args.challengeOpts.module = args.challenge;
args.storeOpts.module = args.store;
console.log('\ngot to the run step');
require(args.challenge);
require(args.store);
var greenlock = require('../').create({
maintainerEmail: args.maintainerEmail || 'coolaj86@gmail.com',
manager: './manager.js',
configFile: '~/.config/greenlock/certs.json',
challenges: challenges,
store: args.storeOpts,
renewOffset: args.renewOffset || '30d',
renewStagger: '1d'
});
// for long-running processes
if (args.renewEvery) {
setInterval(function() {
greenlock.renew({
period: args.renewEvery
});
}, args.renewEvery);
}
// TODO should greenlock.add simply always include greenlock.renew?
// the concern is conflating error events
return greenlock
.add({
subject: args.subject,
altnames: args.altnames,
subscriberEmail: args.subscriberEmail || args.email
})
.then(function(changes) {
console.info(changes);
// renew should always
return greenlock
.renew({
subject: args.subject,
force: false
})
.then(function() {});
});
}
if ('greenlock-store-fs' !== args.store) {
run();
return;
}
// TODO remove mkdirp and let greenlock-store-fs do this?
mkdirp(args.storeOpts.configDir, function(err) {
if (!err) {
run();
return;
}
console.error(
"Could not create --config-dir '" + args.configDir + "':",
err.code
);
console.error("Try setting --config-dir '/tmp'");
return;
});
}, process.argv.slice(3));

bin/config.js (new file, 96 lines)

@@ -0,0 +1,96 @@
'use strict';
var args = process.argv.slice(3);
var cli = require('./lib/cli.js');
//var path = require('path');
//var pkgpath = path.join(__dirname, '..', 'package.json');
//var pkgpath = path.join(process.cwd(), 'package.json');
var Flags = require('./lib/flags.js');
Flags.init().then(function({ flagOptions, greenlock, mconf }) {
var myFlags = {};
['all', 'subject', 'servername' /*, 'servernames', 'altnames'*/].forEach(
function(k) {
myFlags[k] = flagOptions[k];
}
);
cli.parse(myFlags);
cli.main(function(argList, flags) {
Flags.mangleFlags(flags, mconf);
main(argList, flags, greenlock);
}, args);
});
async function main(_, flags, greenlock) {
var servernames = [flags.subject]
.concat([flags.servername])
//.concat(flags.servernames)
//.concat(flags.altnames)
.filter(Boolean);
delete flags.subject;
delete flags.altnames;
flags.servernames = servernames;
if (!flags.all && flags.servernames.length > 1) {
console.error('Error: should specify either --subject OR --servername');
process.exit(1);
return;
} else if (!flags.all && flags.servernames.length !== 1) {
console.error('error: missing --servername <example.com>');
process.exit(1);
return;
}
if (!flags.all) {
flags.servername = flags.servernames[0];
} else if (flags.servername) {
console.error(
'error: cannot have both --all and --servername / --subject'
);
process.exit(1);
}
delete flags.servernames;
var getter = function() {
return greenlock._config(flags);
};
if (flags.all) {
getter = function() {
return greenlock._configAll(flags);
};
}
return getter()
.catch(function(err) {
console.error();
console.error('error:', err.message);
//console.log(err.stack);
console.error();
process.exit(1);
})
.then(function(sites) {
if (!sites) {
console.info();
if (flags.all) {
console.info('No configs found');
} else {
console.info('No config found for', flags.servername);
}
console.info();
process.exit(1);
return;
}
if (!Array.isArray(sites)) {
sites = [sites];
}
sites.forEach(function(site) {
console.info();
console.info(
'Config for ' +
JSON.stringify(flags.servername || site.subject) +
':'
);
console.info(JSON.stringify(site, null, 2));
});
});
}

bin/defaults.js (new file, 62 lines)

@@ -0,0 +1,62 @@
'use strict';
var args = process.argv.slice(3);
var cli = require('./lib/cli.js');
//var path = require('path');
//var pkgpath = path.join(__dirname, '..', 'package.json');
//var pkgpath = path.join(process.cwd(), 'package.json');
var Flags = require('./lib/flags.js');
Flags.init({ forceSave: true }).then(function({
flagOptions,
greenlock,
mconf
}) {
var myFlags = {};
[
'agree-to-terms',
'account-key-type',
'server-key-type',
'subscriber-email',
'renew-offset',
'store',
'store-xxxx',
'challenge-http-01-xxxx',
'challenge-dns-01',
'challenge-dns-01-xxxx',
'challenge-tls-alpn-01',
'challenge-tls-alpn-01-xxxx',
'challenge',
'challenge-xxxx',
'challenge-http-01'
].forEach(function(k) {
myFlags[k] = flagOptions[k];
});
cli.parse(myFlags);
cli.main(function(argList, flags) {
Flags.mangleFlags(flags, mconf, null, { forceSave: true });
main(argList, flags, greenlock);
}, args);
});
async function main(_, flags, greenlock) {
greenlock.manager
.defaults(flags)
.catch(function(err) {
console.error();
console.error('error:', err.message);
//console.log(err.stack);
console.error();
process.exit(1);
})
.then(function() {
return greenlock.manager.defaults();
})
.then(function(dconf) {
console.info();
console.info('Global config');
console.info(JSON.stringify(dconf, null, 2));
});
}
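
Underneath, `greenlock defaults` is a thin wrapper around `greenlock.manager.defaults()`: called with an object it saves global defaults, called with no arguments it reads the merged config back. A hedged sketch (values are placeholders):

greenlock.manager
    .defaults({
        subscriberEmail: 'admin@example.com',
        agreeToTerms: true,
        store: { module: 'greenlock-store-fs' }
    })
    .then(function() {
        // read back the effective global config
        return greenlock.manager.defaults();
    })
    .then(function(dconf) {
        console.info(JSON.stringify(dconf, null, 2));
    });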

26
bin/greenlock.js Executable file
View File

@ -0,0 +1,26 @@
#!/usr/bin/env node
'use strict';
var args = process.argv.slice(2);
var arg0 = args[0];
//console.log(args);
var found = [
'certonly',
'add',
'update',
'config',
'defaults',
'remove',
'init'
].some(function(k) {
if (k === arg0) {
require('./' + k);
return true;
}
});
if (!found) {
console.error(arg0 + ': command not yet implemented');
process.exit(1);
}

162
bin/init.js Normal file
View File

@ -0,0 +1,162 @@
'use strict';
var P = require('../plugins.js');
var args = process.argv.slice(3);
var cli = require('./lib/cli.js');
var Greenlock = require('../');
var Flags = require('./lib/flags.js');
var flagOptions = Flags.flags();
var myFlags = {};
[
'config-dir',
'maintainer-email',
'cluster',
'manager',
'manager-xxxx'
].forEach(function(k) {
myFlags[k] = flagOptions[k];
});
cli.parse(myFlags);
cli.main(async function(argList, flags) {
var pkgRoot = process.cwd();
var manager = flags.manager;
if (['fs', 'cloud'].includes(manager)) {
manager = '@greenlock/manager';
}
if (['cloud'].includes(manager)) {
flags.managerOpts.cloud = true;
}
flags.manager = flags.managerOpts;
delete flags.managerOpts;
flags.manager.module = manager;
try {
if ('.' === String(manager)[0]) {
manager = require('path').resolve(pkgRoot, manager);
}
P._loadSync(manager);
} catch (e) {
try {
P._installSync(manager);
} catch (e) {
console.error(
'error:',
JSON.stringify(manager),
'could not be loaded, and could not be installed.'
);
process.exit(1);
}
}
var greenlock = Greenlock.create({
packageRoot: pkgRoot,
manager: flags.manager,
configDir: flags.configDir,
maintainerEmail: flags.maintainerEmail,
_mustPackage: true
});
await greenlock.manager.defaults();
//writeGreenlockJs(pkgdir, flags);
writeServerJs(pkgRoot, flags);
writeAppJs(pkgRoot);
/*
rc._bin_mode = true;
var Greenlock = require('../');
// this is a copy, so it's safe to modify
var greenlock = Greenlock.create(rc);
var mconf = await greenlock.manager.defaults();
var flagOptions = Flags.flags(mconf, myOpts);
*/
}, args);
/*
function writeGreenlockJs(pkgdir, flags) {
var greenlockJs = 'greenlock.js';
var fs = require('fs');
var path = require('path');
var tmpl = fs.readFileSync(
path.join(__dirname, 'tmpl/greenlock.tmpl.js'),
'utf8'
);
try {
fs.accessSync(path.join(pkgdir, greenlockJs));
console.warn("[skip] '%s' exists", greenlockJs);
return;
} catch (e) {
// continue
}
if (flags.maintainerEmail) {
tmpl = tmpl.replace(
/pkg.author/g,
JSON.stringify(flags.maintainerEmail)
);
}
fs.writeFileSync(path.join(pkgdir, greenlockJs), tmpl);
console.info("created '%s'", greenlockJs);
}
*/
function writeServerJs(pkgdir, flags) {
var serverJs = 'server.js';
var fs = require('fs');
var path = require('path');
var tmpl;
try {
fs.accessSync(path.join(pkgdir, serverJs));
console.warn("[skip] '%s' exists", serverJs);
return;
} catch (e) {
// continue
}
if (flags.cluster) {
tmpl = fs.readFileSync(
path.join(__dirname, 'tmpl/cluster.tmpl.js'),
'utf8'
);
tmpl = tmpl.replace(/cluster: false/g, 'cluster: true');
} else {
tmpl = fs.readFileSync(
path.join(__dirname, 'tmpl/server.tmpl.js'),
'utf8'
);
}
if (flags.maintainerEmail) {
tmpl = tmpl
.replace(/pkg.author/g, JSON.stringify(flags.maintainerEmail))
.replace(/\/\/maintainerEmail/g, 'maintainerEmail');
}
fs.writeFileSync(path.join(pkgdir, serverJs), tmpl);
console.info("created '%s'", serverJs);
}
function writeAppJs(pkgdir) {
var appJs = 'app.js';
var fs = require('fs');
var path = require('path');
var tmpl = fs.readFileSync(
path.join(__dirname, 'tmpl/app.tmpl.js'),
'utf8'
);
try {
fs.accessSync(path.join(pkgdir, appJs));
console.warn("[skip] '%s' exists", appJs);
return;
} catch (e) {
fs.writeFileSync(path.join(pkgdir, appJs), tmpl);
console.info("created '%s'", appJs);
}
}

240
bin/lib/cli.js Normal file
View File

@ -0,0 +1,240 @@
'use strict';
var CLI = module.exports;
var defaultConf;
var defaultOpts;
var bags = [];
CLI.parse = function(conf) {
var opts = (defaultOpts = {});
defaultConf = conf;
Object.keys(conf).forEach(function(k) {
var v = conf[k];
if (!v) {
console.error(
'Developer Error: missing cli flag definition for',
JSON.stringify(k)
);
process.exit(1);
}
var aliases = v[5];
var bag;
var bagName;
// the name of the argument set is now the 0th argument
v.unshift(k);
// v[0] flagname
// v[1] short flagname
// v[2] description
// v[3] type
// v[4] default value
// v[5] aliases
if ('bag' === v[3]) {
bag = v[0]; // 'bag-option-xxxx' => '--bag-option-'
bag = '--' + bag.replace(/xxx.*/, '');
bags.push(bag);
bagName = toBagName(bag.replace(/^--/, ''));
opts[bagName] = {};
}
if ('json' === v[3]) {
bagName = toBagName(v[0].replace(/-json$/, '')); // 'bag-option-json' => 'bagOptionOpts'
opts[bagName] = {};
} else if ('ignore' !== v[3] && 'undefined' !== typeof v[4]) {
// set the default values (where 'undefined' is not an allowed value)
opts[toCamel(k)] = v[4];
}
if (!aliases) {
aliases = [];
} else if ('string' === typeof aliases) {
aliases = aliases.split(',');
}
aliases.forEach(function(alias) {
if (alias in conf) {
throw new Error(
"Cannot alias '" +
alias +
"' from '" +
k +
"': option already exists"
);
}
conf[alias] = v;
});
});
};
CLI.main = function(cb, args) {
var leftovers = [];
var conf = defaultConf;
var opts = defaultOpts;
if (!opts) {
throw new Error("you didn't call `CLI.parse(configuration)`");
}
// TODO what's the existing API for this?
if (!args) {
args = process.argv.slice(2);
}
var flag;
var cnf;
var typ;
function grab(bag) {
var bagName = toBagName(bag);
if (bag !== flag.slice(0, bag.length)) {
return false;
}
opts[bagName][toCamel(flag.slice(bag.length))] = args.shift();
return true;
}
while (args.length) {
// take one off the top
flag = args.shift();
// mind the gap
if ('--' === flag) {
leftovers = leftovers.concat(args);
break;
}
// help!
if (
'--help' === flag ||
'-h' === flag ||
'/?' === flag ||
'help' === flag
) {
printHelp(conf);
process.exit(1);
}
// only long names are actually used
if ('--' !== flag.slice(0, 2)) {
console.error("error: unrecognized flag '" + flag + "'");
process.exit(1);
}
cnf = conf[flag.slice(2)];
if (!cnf) {
// look for arbitrary flags
if (bags.some(grab)) {
continue;
}
// other arbitrary args are not used
console.error("unrecognized elided flag '" + flag + "'");
process.exit(1);
}
// encourage switching to non-aliased version
if (flag !== '--' + cnf[0]) {
console.warn(
"use of '" +
flag +
"' is deprecated, use '--" +
cnf[0] +
"' instead"
);
}
// look for xxx-json flags
if ('json' === cnf[3]) {
try {
var json = JSON.parse(args.shift());
var bagName = toBagName(cnf[0].replace(/-json$/, ''));
Object.keys(json).forEach(function(k) {
opts[bagName][k] = json[k];
});
} catch (e) {
console.error("Could not parse option '" + flag + "' as JSON:");
console.error(e.message);
process.exit(1);
}
continue;
}
// set booleans, otherwise grab the next arg in line
typ = cnf[3];
// TODO --no-<whatever> to negate
if (Boolean === typ || 'boolean' === typ) {
opts[toCamel(cnf[0])] = true;
continue;
}
opts[toCamel(cnf[0])] = args.shift();
continue;
}
cb(leftovers, opts);
};
function toCamel(str) {
return str.replace(/-([a-z0-9])/g, function(m) {
return m[1].toUpperCase();
});
}
function toBagName(bag) {
// trim leading and trailing '-'
bag = bag.replace(/^-+/g, '').replace(/-+$/g, '');
return toCamel(bag) + 'Opts'; // '--bag-option-' => bagOptionOpts
}
function printHelp(conf) {
var flagLen = 0;
var typeLen = 0;
var defLen = 0;
Object.keys(conf).forEach(function(k) {
flagLen = Math.max(flagLen, conf[k][0].length);
typeLen = Math.max(typeLen, conf[k][3].length);
if ('undefined' !== typeof conf[k][4]) {
defLen = Math.max(
defLen,
'(Default: )'.length + String(conf[k][4]).length
);
}
});
Object.keys(conf).forEach(function(k) {
var v = conf[k];
// skip aliases
if (v[0] !== k) {
return;
}
var def = v[4];
if ('undefined' === typeof def) {
def = '';
} else {
def = '(default: ' + JSON.stringify(def) + ')';
}
var msg =
' --' +
v[0].padEnd(flagLen) +
' ' +
v[3].padStart(typeLen + 1) +
' ' +
(v[2] || '') +
' ' +
def; /*.padStart(defLen)*/
// v[0] flagname
// v[1] short flagname
// v[2] description
// v[3] type
// v[4] default value
// v[5] aliases
console.info(msg);
});
}
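
To make the positional `v[0..5]` convention above concrete, here is a small sketch of defining and parsing flags with this module (flag names and values are made up; assumes it is required from the same bin/ directory):

var cli = require('./lib/cli.js');

cli.parse({
    // [short flag, description, type, default, aliases]
    subject: [false, 'primary domain of the certificate', 'string'],
    'force-save': [false, 'save even if same as the defaults', 'boolean', false],
    'store-xxxx': [false, 'arbitrary --store-* options', 'bag']
});

cli.main(function(leftovers, flags) {
    // `node config.js --subject example.com --store-apikey abc123` yields roughly:
    // { forceSave: false, subject: 'example.com', storeOpts: { apikey: 'abc123' } }
    console.info(leftovers, flags);
}, process.argv.slice(2));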

400
bin/lib/flags.js Normal file
View File

@ -0,0 +1,400 @@
'use strict';
var Flags = module.exports;
//var path = require('path');
var pkgRoot = process.cwd();
//var Init = require('../../lib/init.js');
// These are ALL options
// The individual CLI files each select a subset of them
Flags.flags = function(mconf, myOpts) {
// Current Manager Config
if (!mconf) {
mconf = {};
}
// Extra Override Options
if (!myOpts) {
myOpts = {};
}
return {
all: [
false,
'search all site configs rather than by --subject or --servernames',
'boolean'
],
'agree-to-terms': [
false,
"agree to the Let's Encrypts Subscriber Agreement and Greenlock Terms of Use",
'boolean'
],
subject: [
false,
'the "subject" (primary domain) of the certificate',
'string'
],
altnames: [
false,
'the "subject alternative names" (additional domains) on the certificate, the first of which MUST be the subject',
'string'
],
servername: [
false,
'a name that matches a subject or altname',
'string'
],
servernames: [
false,
'a list of names that matches a subject or altname',
'string'
],
cluster: [false, 'initialize with cluster mode on', 'boolean', false],
'renew-offset': [
false,
"time to wait until renewing the cert such as '45d' (45 days after being issued) or '-3w' (3 weeks before expiration date)",
'string',
mconf.renewOffset
],
'customer-email': [
false,
"the email address of the owner of the domain or site (not necessarily the Let's Encrypt or ACME subscriber)",
'string'
],
'subscriber-email': [
false,
"the email address of the Let's Encrypt or ACME Account subscriber (not necessarily the domain owner)",
'string'
],
'config-dir': [
false,
'the directory in which config.json and other config and storage files should be written',
'string'
],
'maintainer-email': [
false,
'the maintenance contact for security and critical bug notices',
'string'
],
'account-key-type': [
false,
"either 'P-256' (ECDSA) or 'RSA-2048' - although other values are technically supported, they don't make sense and won't work with many services (More bits != More security)",
'string',
mconf.accountKeyType
],
'server-key-type': [
false,
"either 'RSA-2048' or 'P-256' (ECDSA) - although other values are technically supported, they don't make sense and won't work with many services (More bits != More security)",
'string',
mconf.serverKeyType
],
store: [
false,
'the module name or file path of the store module to use',
'string'
//mconf.store.module
],
'store-xxxx': [
false,
'an option for the chosen store module, such as --store-apikey or --store-bucket',
'bag'
],
manager: [
false,
'the module name or file path of the manager module to use',
'string',
'@greenlock/manager'
],
'manager-xxxx': [
false,
'an option for the chosen manager module, such as --manager-apikey or --manager-dburl',
'bag'
],
challenge: [
false,
'the module name or file path of the HTTP-01, DNS-01, or TLS-ALPN-01 challenge module to use',
'string',
''
/*
Object.keys(mconf.challenges)
.map(function(typ) {
return mconf.challenges[typ].module;
})
.join(',')
*/
],
'challenge-xxxx': [
false,
'an option for the chosen challenge module, such as --challenge-apikey or --challenge-bucket',
'bag'
],
'challenge-json': [
false,
'a JSON string containing all options for the chosen challenge module (instead of --challenge-xxxx)',
'json',
'{}'
],
'challenge-http-01': [
false,
'the module name or file path of the HTTP-01 challenge module to add',
'string'
//(mconf.challenges['http-01'] || {}).module
],
'challenge-http-01-xxxx': [
false,
'an option for the chosen challenge module, such as --challenge-http-01-apikey or --challenge-http-01-bucket',
'bag'
],
'challenge-dns-01': [
false,
'the module name or file path of the DNS-01 challenge module to add',
'string'
//(mconf.challenges['dns-01'] || {}).module
],
'challenge-dns-01-xxxx': [
false,
'an option for the chosen challenge module, such as --challenge-dns-01-apikey or --challenge-dns-01-bucket',
'bag'
],
'challenge-tls-alpn-01': [
false,
'the module name or file path of the TLS-ALPN-01 challenge module to add',
'string'
//(mconf.challenges['tls-alpn-01'] || {}).module
],
'challenge-tls-alpn-01-xxxx': [
false,
'an option for the chosen challenge module, such as --challenge-tls-alpn-01-apikey or --challenge-tls-alpn-01-bucket',
'bag'
],
'force-save': [
false,
"save all options for this site, even if it's the same as the defaults",
'boolean',
myOpts.forceSave || false
]
};
};
Flags.init = async function(myOpts) {
var Greenlock = require('../../');
// this is a copy, so it's safe to modify
var greenlock = Greenlock.create({
packageRoot: pkgRoot,
_mustPackage: true,
_init: true,
_bin_mode: true
});
var mconf = await greenlock.manager.defaults();
var flagOptions = Flags.flags(mconf, myOpts);
return {
flagOptions,
greenlock,
mconf
};
};
Flags.mangleFlags = function(flags, mconf, sconf, extras) {
if (extras) {
if (extras.forceSave) {
flags.forceSave = true;
}
}
//console.log('debug a:', flags);
if ('altnames' in flags) {
flags.altnames = (flags.altnames || '').split(/[,\s]+/).filter(Boolean);
}
if ('servernames' in flags) {
flags.servernames = (flags.servernames || '')
.split(/[,\s]+/)
.filter(Boolean);
}
var store;
if (flags.store) {
store = flags.storeOpts;
store.module = flags.store;
flags.store = store;
} else {
delete flags.store;
}
delete flags.storeOpts;
// If this is additive, make an object to hold all values
var isAdditive = [
['http-01', 'Http01'],
['dns-01', 'Dns01'],
['tls-alpn-01', 'TlsAlpn01']
].some(function(types) {
var typCamel = types[1];
var modname = 'challenge' + typCamel;
if (flags[modname]) {
if (!flags.challenges) {
flags.challenges = {};
}
return true;
}
});
if (isAdditive && sconf) {
// copy over the old
var schallenges = sconf.challenges || {};
Object.keys(schallenges).forEach(function(k) {
if (!flags.challenges[k]) {
flags.challenges[k] = schallenges[k];
}
});
}
var typ;
var challenge;
if (flags.challenge) {
// this variant of the flag is exclusive
flags.challenges = {};
isAdditive = false;
if (/http-01/.test(flags.challenge)) {
typ = 'http-01';
} else if (/dns-01/.test(flags.challenge)) {
typ = 'dns-01';
} else if (/tls-alpn-01/.test(flags.challenge)) {
typ = 'tls-alpn-01';
}
var modname = 'challenge';
var optsname = 'challengeOpts';
challenge = flags[optsname];
// JSON may already have module name
if (challenge.module) {
if (flags[modname] && challenge.module !== flags[modname]) {
console.error(
'module names do not match:',
JSON.stringify(challenge.module),
JSON.stringify(flags[modname])
);
process.exit(1);
}
} else {
challenge.module = flags[modname];
}
flags.challenges[typ] = challenge;
var chall = mconf.challenges[typ];
if (chall && challenge.module === chall.module) {
var keys = Object.keys(challenge);
var same =
!keys.length ||
keys.every(function(k) {
return chall[k] === challenge[k];
});
if (same && !flags.forceSave) {
delete flags.challenges;
}
}
}
delete flags.challenge;
delete flags.challengeOpts;
// Add each of the values, including the existing
[
['http-01', 'Http01'],
['dns-01', 'Dns01'],
['tls-alpn-01', 'TlsAlpn01']
].forEach(function(types) {
var typ = types[0];
var typCamel = types[1];
var modname = 'challenge' + typCamel;
var optsname = 'challenge' + typCamel + 'Opts';
var chall = mconf.challenges[typ];
var challenge = flags[optsname];
// this variant of the flag is additive
if (isAdditive && chall && flags.forceSave) {
if (flags.challenges && !flags.challenges[typ]) {
flags.challenges[typ] = chall;
}
}
if (!flags[modname]) {
delete flags[modname];
delete flags[optsname];
return;
}
// JSON may already have module name
if (challenge.module) {
if (flags[modname] && challenge.module !== flags[modname]) {
console.error(
'module names do not match:',
JSON.stringify(challenge.module),
JSON.stringify(flags[modname])
);
process.exit(1);
}
} else {
challenge.module = flags[modname];
}
if (flags[modname]) {
if (!flags.challenges) {
flags.challenges = {};
}
flags.challenges[typ] = challenge;
}
// Check to see if this is already what's set in the defaults
if (chall && challenge.module === chall.module) {
var keys = Object.keys(challenge);
// Check if all of the options are also the same
var same =
!keys.length ||
keys.every(function(k) {
return chall[k] === challenge[k];
});
if (same && !flags.forceSave) {
// If it's already the global, don't make it the per-site
delete flags[modname];
delete flags[optsname];
}
}
delete flags[modname];
delete flags[optsname];
});
[
['accountKeyType', [/256/, /384/, /EC/], 'EC-P256'],
['serverKeyType', [/RSA/], 'RSA-2048']
].forEach(function(k) {
var key = k[0];
var vals = k[1];
var val = flags[key];
if (val) {
if (
!vals.some(function(v) {
return v.test(val);
})
) {
flags[key] = k[2];
console.warn(
key,
"does not allow the value '",
val,
"' using the default '",
k[2],
"' instead."
);
}
}
});
Object.keys(flags).forEach(function(k) {
if (flags[k] === mconf[k] && !flags.forceSave) {
delete flags[k];
}
});
//console.log('debug z:', flags);
delete flags.forceSave;
};
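
To illustrate what `mangleFlags` does with the challenge 'bag' options, a hypothetical before/after (module name and token are placeholders, and `mconf` is assumed to come from `Flags.init()` with no dns-01 challenge already configured):

// roughly what cli.js produces for:
//   --challenge-dns-01 acme-dns-01-cloudflare --challenge-dns-01-token xxxx
var flags = {
    subject: 'example.com',
    challengeDns01: 'acme-dns-01-cloudflare',
    challengeDns01Opts: { token: 'xxxx' }
};

Flags.mangleFlags(flags, mconf);
// the per-type module/option pairs are folded into one `challenges` object:
// { subject: 'example.com',
//   challenges: { 'dns-01': { token: 'xxxx', module: 'acme-dns-01-cloudflare' } } }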

55
bin/remove.js Normal file
View File

@ -0,0 +1,55 @@
'use strict';
var args = process.argv.slice(3);
var cli = require('./lib/cli.js');
//var path = require('path');
//var pkgpath = path.join(__dirname, '..', 'package.json');
//var pkgpath = path.join(process.cwd(), 'package.json');
var Flags = require('./lib/flags.js');
Flags.init().then(function({ flagOptions, greenlock, mconf }) {
var myFlags = {};
['subject'].forEach(function(k) {
myFlags[k] = flagOptions[k];
});
cli.parse(myFlags);
cli.main(function(argList, flags) {
Flags.mangleFlags(flags, mconf);
main(argList, flags, greenlock);
}, args);
});
async function main(_, flags, greenlock) {
if (!flags.subject) {
console.error('--subject must be provided as a valid domain');
process.exit(1);
return;
}
greenlock
.remove(flags)
.catch(function(err) {
console.error();
console.error('error:', err.message);
//console.log(err.stack);
console.error();
process.exit(1);
})
.then(function(site) {
if (!site) {
console.info();
console.info('No config found for', flags.subject);
console.info();
process.exit(1);
return;
}
console.info();
console.info(
'Deleted config for ' + JSON.stringify(flags.subject) + ':'
);
console.info(JSON.stringify(site, null, 2));
console.info();
});
}

9
bin/tmpl/app.tmpl.js Normal file
View File

@ -0,0 +1,9 @@
'use strict';
// Here's a vanilla HTTP app to start,
// but feel free to replace it with Express, Koa, etc
var app = function(req, res) {
res.end('Hello, Encrypted World!');
};
module.exports = app;

30
bin/tmpl/cluster.tmpl.js Normal file
View File

@ -0,0 +1,30 @@
'use strict';
require('greenlock-express')
.init(function() {
// var pkg = require('./package.json');
return {
// where to find .greenlockrc and set default paths
packageRoot: __dirname,
// name & version for ACME client user agent
//packageAgent: pkg.name + '/' + pkg.version,
// contact for security and critical bug notices
//maintainerEmail: pkg.author,
// where to look for configuration
configDir: './greenlock.d',
// whether or not to run at cloudscale
cluster: true
};
})
.ready(function(glx) {
var app = require('./app.js');
// Serves on 80 and 443
// Gets SSL certificates magically!
glx.serveApp(app);
});

13
bin/tmpl/greenlock.tmpl.js Normal file
View File

@ -0,0 +1,13 @@
'use strict';
var pkg = require('./package.json');
module.exports = require('@root/greenlock').create({
// name & version for ACME client user agent
packageAgent: pkg.name + '/' + pkg.version,
// contact for security and critical bug notices
//maintainerEmail: pkg.author,
// where to find .greenlockrc and set default paths
packageRoot: __dirname
});

20
bin/tmpl/server.tmpl.js Normal file
View File

@ -0,0 +1,20 @@
'use strict';
var app = require('./app.js');
require('greenlock-express')
.init({
packageRoot: __dirname,
// contact for security and critical bug notices
//maintainerEmail: pkg.author,
// where to look for configuration
configDir: './greenlock.d',
// whether or not to run at cloudscale
cluster: false
})
// Serves on 80 and 443
// Gets SSL certificates magically!
.serve(app);

79
bin/update.js Normal file
View File

@ -0,0 +1,79 @@
'use strict';
var args = process.argv.slice(3);
var cli = require('./lib/cli.js');
var Flags = require('./lib/flags.js');
Flags.init().then(function({ flagOptions, greenlock, mconf }) {
var myFlags = {};
[
'subject',
'altnames',
'renew-offset',
'subscriber-email',
'customer-email',
'server-key-type',
'challenge-http-01',
'challenge-http-01-xxxx',
'challenge-dns-01',
'challenge-dns-01-xxxx',
'challenge-tls-alpn-01',
'challenge-tls-alpn-01-xxxx',
'challenge',
'challenge-xxxx',
'challenge-json',
'force-save'
].forEach(function(k) {
myFlags[k] = flagOptions[k];
});
cli.parse(myFlags);
cli.main(async function(argList, flags) {
var sconf = await greenlock._config({ servername: flags.subject });
Flags.mangleFlags(flags, mconf, sconf);
main(argList, flags, greenlock);
}, args);
});
async function main(_, flags, greenlock) {
if (!flags.subject) {
console.error('--subject must be provided as a valid domain');
process.exit(1);
return;
}
greenlock
.update(flags)
.catch(function(err) {
console.error();
console.error('error:', err.message);
console.error();
process.exit(1);
})
.then(function() {
return greenlock
._config({ servername: flags.subject })
.then(function(site) {
if (!site) {
console.info();
console.info('No config found for', flags.subject);
console.info();
process.exit(1);
return;
}
console.info();
Object.keys(site).forEach(function(k) {
if ('defaults' === k) {
console.info(k + ':');
Object.keys(site.defaults).forEach(function(key) {
var value = JSON.stringify(site.defaults[key]);
console.info('\t' + key + ':' + value);
});
} else {
console.info(k + ': ' + JSON.stringify(site[k]));
}
});
});
});
}
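
The same update can be performed from the API; `mangleFlags` is what turns the CLI's challenge flags into the nested `challenges` object shown here. A hedged sketch (module name, token, and email are placeholders):

greenlock
    .update({
        subject: 'example.com',
        subscriberEmail: 'admin@example.com',
        challenges: {
            'dns-01': { module: 'acme-dns-01-cloudflare', token: 'xxxx' }
        }
    })
    .then(function() {
        return greenlock._config({ servername: 'example.com' });
    })
    .then(function(site) {
        console.info(JSON.stringify(site, null, 2));
    });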

324
certificates.js Normal file
View File

@ -0,0 +1,324 @@
'use strict';
var C = module.exports;
var U = require('./utils.js');
var CSR = require('@root/csr');
var Enc = require('@root/encoding');
var Keypairs = require('@root/keypairs');
var pending = {};
var rawPending = {};
// What the abbreviations mean
//
// gnlkc => greenlock
// mconf => manager config
// db => greenlock store instance
// acme => instance of ACME.js
// chs => instances of challenges
// acc => account
// args => site / extra options
// Certificates
C._getOrOrder = function(gnlck, mconf, db, acme, chs, acc, args) {
var email = args.subscriberEmail || mconf.subscriberEmail;
var id = args.altnames
.slice(0)
.sort()
.join(' ');
if (pending[id]) {
return pending[id];
}
pending[id] = C._rawGetOrOrder(
gnlck,
mconf,
db,
acme,
chs,
acc,
email,
args
)
.then(function(pems) {
delete pending[id];
return pems;
})
.catch(function(err) {
delete pending[id];
throw err;
});
return pending[id];
};
// Certificates
C._rawGetOrOrder = function(gnlck, mconf, db, acme, chs, acc, email, args) {
return C._check(gnlck, mconf, db, args).then(function(pems) {
// Nice and fresh? We're done!
if (pems) {
if (!C._isStale(gnlck, mconf, args, pems)) {
// return existing unexpired (although potentially stale) certificates when available
// there will be an additional .renewing property if the certs are being asynchronously renewed
//pems._type = 'current';
return pems;
}
}
// We're either starting fresh or freshening up...
var p = C._rawOrder(gnlck, mconf, db, acme, chs, acc, email, args);
var evname = pems ? 'cert_renewal' : 'cert_issue';
p.then(function(newPems) {
// notify in the background
var renewAt = C._renewWithStagger(gnlck, mconf, args, newPems);
gnlck._notify(evname, {
renewAt: renewAt,
subject: args.subject,
altnames: args.altnames
});
gnlck._notify('_cert_issue', {
renewAt: renewAt,
subject: args.subject,
altnames: args.altnames,
pems: newPems
});
}).catch(function(err) {
if (!err.context) {
err.context = evname;
}
err.subject = args.subject;
err.altnames = args.altnames;
gnlck._notify('error', err);
});
// No choice but to hang tight and wait for it
if (
!pems ||
pems.renewAt < Date.now() - 24 * 60 * 60 * 1000 ||
pems.expiresAt <= Date.now() + 24 * 60 * 60 * 1000
) {
return p;
}
// Wait it out
// TODO should we call this waitForRenewal?
if (args.waitForRenewal) {
return p;
}
// Let the certs renew in the background
return pems;
});
};
// we have another promise here because the optional renewal
// may resolve in a different stack than the returned pems
C._rawOrder = function(gnlck, mconf, db, acme, chs, acc, email, args) {
var id = args.altnames
.slice(0)
.sort()
.join(' ');
if (rawPending[id]) {
return rawPending[id];
}
var keyType = args.serverKeyType || mconf.serverKeyType;
var query = {
subject: args.subject,
certificate: args.certificate || {},
directoryUrl:
args.directoryUrl ||
mconf.directoryUrl ||
gnlck._defaults.directoryUrl
};
rawPending[id] = U._getOrCreateKeypair(db, args.subject, query, keyType)
.then(function(kresult) {
var serverKeypair = kresult.keypair;
var domains = args.altnames.slice(0);
return CSR.csr({
jwk: serverKeypair.privateKeyJwk || serverKeypair.private,
domains: domains,
encoding: 'der'
})
.then(function(csrDer) {
// TODO let CSR support 'urlBase64' ?
return Enc.bufToUrlBase64(csrDer);
})
.then(function(csr) {
function notify(ev, opts) {
gnlck._notify(ev, opts);
}
var certReq = {
debug: args.debug || gnlck._defaults.debug,
challenges: chs,
account: acc, // only used if accounts.key.kid exists
accountKey:
acc.keypair.privateKeyJwk || acc.keypair.private,
keypair: acc.keypair, // TODO
csr: csr,
domains: domains, // because ACME.js v3 uses `domains` still, actually
onChallengeStatus: notify,
notify: notify // TODO
// TODO handle this in acme-v2
//subject: args.subject,
//altnames: args.altnames.slice(0),
};
return acme.certificates
.create(certReq)
.then(U._attachCertInfo);
})
.then(function(pems) {
if (kresult.exists) {
return pems;
}
query.keypair = serverKeypair;
return db.setKeypair(query, serverKeypair).then(function() {
return pems;
});
});
})
.then(function(pems) {
// TODO put this in the docs
// { cert, chain, privkey, subject, altnames, issuedAt, expiresAt }
// Note: the query has been updated
query.pems = pems;
return db.set(query);
})
.then(function() {
return C._check(gnlck, mconf, db, args);
})
.then(function(bundle) {
// TODO notify Manager
delete rawPending[id];
return bundle;
})
.catch(function(err) {
// Todo notify manager
delete rawPending[id];
throw err;
});
return rawPending[id];
};
// returns pems, if they exist
C._check = function(gnlck, mconf, db, args) {
var query = {
subject: args.subject,
// may contain certificate.id
certificate: args.certificate,
directoryUrl:
args.directoryUrl ||
mconf.directoryUrl ||
gnlck._defaults.directoryUrl
};
return db.check(query).then(function(pems) {
if (!pems) {
return null;
}
pems = U._attachCertInfo(pems);
// For eager management
if (args.subject && !U._certHasDomain(pems, args.subject)) {
// TODO report error, but continue the process as with no cert
return null;
}
// For lazy SNI requests
if (args.domain && !U._certHasDomain(pems, args.domain)) {
// TODO report error, but continue the process as with no cert
return null;
}
return U._getKeypair(db, args.subject, query)
.then(function(keypair) {
return Keypairs.export({
jwk: keypair.privateKeyJwk || keypair.private,
encoding: 'pem'
}).then(function(pem) {
pems.privkey = pem;
return pems;
});
})
.catch(function() {
// TODO report error, but continue the process as with no cert
return null;
});
});
};
// Certificates
C._isStale = function(gnlck, mconf, args, pems) {
if (args.duplicate) {
return true;
}
var renewAt = C._renewableAt(gnlck, mconf, args, pems);
if (Date.now() >= renewAt) {
return true;
}
return false;
};
C._renewWithStagger = function(gnlck, mconf, args, pems) {
var renewOffset = C._renewOffset(gnlck, mconf, args, pems);
var renewStagger;
try {
renewStagger = U._parseDuration(
args.renewStagger || mconf.renewStagger || 0
);
} catch (e) {
renewStagger = U._parseDuration(
args.renewStagger || mconf.renewStagger
);
}
// TODO check this beforehand
if (!args.force && renewStagger / renewOffset >= 0.5) {
renewStagger = renewOffset * 0.1;
}
if (renewOffset > 0) {
// stagger forward, away from issued at
return Math.round(
pems.issuedAt + renewOffset + Math.random() * renewStagger
);
}
// stagger backward, toward issued at
return Math.round(
pems.expiresAt + renewOffset - Math.random() * renewStagger
);
};
C._renewOffset = function(gnlck, mconf, args /*, pems*/) {
var renewOffset = U._parseDuration(
args.renewOffset || mconf.renewOffset || 0
);
var week = 1000 * 60 * 60 * 24 * 6;
if (!args.force && Math.abs(renewOffset) < week) {
throw new Error(
'developer error: `renewOffset` should always be at least a week, use `force` to not safety-check renewOffset'
);
}
return renewOffset;
};
C._renewableAt = function(gnlck, mconf, args, pems) {
if (args.renewAt) {
return args.renewAt;
}
var renewOffset = C._renewOffset(gnlck, mconf, args, pems);
if (renewOffset > 0) {
return pems.issuedAt + renewOffset;
}
return pems.expiresAt + renewOffset;
};
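
A worked example of the renewal math above, using the library's default offsets ('-45d' renewOffset, '3d' renewStagger) and a 90-day certificate; the dates are placeholders:

var DAY = 24 * 60 * 60 * 1000;
var issuedAt = Date.now();
var expiresAt = issuedAt + 90 * DAY; // a typical 90-day certificate
var renewOffset = -45 * DAY; // '-45d' => count back from expiresAt
var renewStagger = 3 * DAY; // '3d'

// C._renewableAt with a negative offset:
var renewAt = expiresAt + renewOffset; // i.e. issuedAt + 45 days

// C._renewWithStagger staggers backward, toward issuedAt,
// so a fleet of servers does not all renew at the same instant:
var staggeredRenewAt = Math.round(
    expiresAt + renewOffset - Math.random() * renewStagger
);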

58
errors.js Normal file
View File

@ -0,0 +1,58 @@
'use strict';
var E = module.exports;
function create(code, msg) {
E[code] = function(ctx, msg2) {
var err = new Error(msg);
err.code = code;
err.context = ctx;
if (msg2) {
err.message += ': ' + msg2;
}
/*
Object.keys(extras).forEach(function(k) {
if ('message' === k) {
err.message += ': ' + extras[k];
} else {
err[k] = extras[k];
}
});
*/
return err;
};
}
// TODO open issues and link to them as the error url
create(
'NO_MAINTAINER',
'please supply `maintainerEmail` as a contact for security and critical bug notices'
);
create(
'BAD_ORDER',
'altnames should be in deterministic order, with subject as the first altname'
);
create('NO_SUBJECT', 'no certificate subject given');
create(
'NO_SUBSCRIBER',
'please supply `subscriberEmail` as a contact for failed renewal and certificate revocation'
);
create(
'INVALID_SUBSCRIBER',
'`subscriberEmail` is not a valid address, please check for typos'
);
create(
'INVALID_HOSTNAME',
'valid hostnames must be restricted to a-z0-9_.- and contain at least one "."'
);
create(
'INVALID_DOMAIN',
'one or more domains do not exist on public DNS SOA record'
);
create(
'NOT_UNIQUE',
'found duplicate domains, or a subdomain that overlaps a wildcard'
);
// exported for testing only
E._create = create;
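
A short usage sketch of the generated error factories (the context string and extra message are arbitrary):

var E = require('./errors.js');

// each factory returns a plain Error with .code and .context attached
var err = E.NO_MAINTAINER('create', 'gconf.maintainerEmail was empty');
console.log(err.code); // 'NO_MAINTAINER'
console.log(err.context); // 'create'
console.log(err.message); // 'please supply `maintainerEmail` ...: gconf.maintainerEmail was empty'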

View File

@ -1,53 +0,0 @@
'use strict';
var LE = require('../');
var config = require('./config-minimal');
// Note: you should make this special dir in your product and leave it empty
config.le.webrootPath = __dirname + '/../tests/acme-challenge';
config.le.server = LE.stagingServer;
//
// Manual Registration
//
var le = LE.create(config.backend, config.le);
le.register({
agreeTos: true
, domains: ['example.com'] // CHANGE TO YOUR DOMAIN
, email: 'user@example.com' // CHANGE TO YOUR EMAIL
}, function (err) {
if (err) {
console.error('[Error]: node-letsencrypt/examples/standalone');
console.error(err.stack);
} else {
console.log('success');
}
plainServer.close();
tlsServer.close();
});
//
// Express App
//
var app = require('express')();
app.use('/', le.middleware());
//
// HTTP & HTTPS servers
// (required for domain validation)
//
var plainServer = require('http').createServer(app).listen(config.plainPort, function () {
console.log('Listening http', this.address());
});
var tlsServer = require('https').createServer({
key: config.tlsKey
, cert: config.tlsCert
, SNICallback: le.sniCallback
}, app).listen(config.tlsPort, function () {
console.log('Listening https', this.address());
});

View File

@ -1,90 +0,0 @@
'use strict';
var conf = {
domains: process.argv[2]
, email: process.argv[3]
, agree: process.argv[4]
};
var port = 80;
var tlsPort = 5001;
if (!conf.domains || !conf.email || !conf.agree) {
console.error("Usage: letsencrypt <domain1,domain2> <email> agree");
console.error("Example: letsencrypt example.com,www.example.com user@example.com agree");
return;
}
var LE = require('../');
var path = require('path');
// backend-specific defaults will be passed through
// Note: Since agreeTos is a legal agreement, I would suggest not accepting it by default
var bkDefaults = {
webrootPath: path.join(__dirname, '..', 'tests', 'acme-challenge')
, fullchainTpl: '/live/:hostname/fullchain.pem'
, privkeyTpl: '/live/:hostname/privkey.pem'
, configDir: path.join(__dirname, '..', 'tests', 'letsencrypt.config')
, logsDir: path.join(__dirname, '..', 'tests', 'letsencrypt.logs')
, workDir: path.join(__dirname, '..', 'tests', 'letsencrypt.work')
, server: LE.stagingServer
, text: true
};
var leBinPath = require('os').homedir() + '/.local/share/letsencrypt/bin/letsencrypt';
var LEB = require('../backends-python');
var backend = LEB.create(leBinPath, bkDefaults, { debug: true });
var le = LE.create(backend, bkDefaults, {
/*
setChallenge: function (hostnames, key, value, cb) {
// the python backend needs fs.watch implemented
// before this would work (and even then it would be difficult)
}
, getChallenge: function (hostnames, key, cb) {
//
}
, sniRegisterCallback: function (args, certInfo, cb) {
}
, registrationFailureCallback: function (args, certInfo, cb) {
what to do when a backgrounded registration fails
}
*/
});
var localCerts = require('localhost.daplie.com-certificates');
var express = require('express');
var app = express();
app.use('/', le.middleware());
var server = require('http').createServer();
server.on('request', app);
server.listen(port, function () {
console.log('Listening http', server.address());
});
var tlsServer = require('https').createServer({
key: localCerts.key
, cert: localCerts.cert
, SNICallback: le.sniCallback
});
tlsServer.on('request', app);
tlsServer.listen(tlsPort, function () {
console.log('Listening https', tlsServer.address());
});
le.register({
agreeTos: 'agree' === conf.agree
, domains: conf.domains.split(',')
, email: conf.email
}, function (err) {
if (err) {
console.error('[Error]: node-letsencrypt/examples/standalone');
console.error(err.stack);
} else {
console.log('success');
}
server.close();
tlsServer.close();
});

View File

@ -1,31 +0,0 @@
'use strict';
var path = require('path');
var binpath = require('os').homedir() + '/.local/share/letsencrypt/bin/letsencrypt';
var config = {
plainPort: 80
, tlsPort: 5001 // 5001 for testing, normally 443
, tlsKey: require('localhost.daplie.com-certificates').key
, tlsCert: require('localhost.daplie.com-certificates').cert
, le: {
webrootPath: path.join(__dirname, '..', 'tests', 'acme-challenge')
, fullchainTpl: '/live/:hostname/fullchain.pem'
, privkeyTpl: '/live/:hostname/privkey.pem'
, configDir: path.join(__dirname, '..', 'tests', 'letsencrypt.config')
// these are specific to the python client and won't be needed with the purejs library
, logsDir: path.join(__dirname, '..', 'tests', 'letsencrypt.logs')
, workDir: path.join(__dirname, '..', 'tests', 'letsencrypt.work')
}
};
//config.backend = require('letsencrypt/backends-python').create(binpath, config.le);
config.backend = require('../backends-python').create(binpath, config.le);
module.exports = config;

View File

@ -1,57 +0,0 @@
'use strict';
var LE = require('../');
var config = require('./config-minimal');
// Note: you should make this special dir in your product and leave it empty
config.le.webrootPath = __dirname + '/../tests/acme-challenge';
config.le.server = LE.stagingServer;
var le = LE.create(config.backend, config.le, {
sniRegisterCallback: function (args, expiredCert, cb) {
// In theory you should never get an expired certificate because
// the certificates automatically renew in the background starting
// about a week before they expire.
// (the default behavior is to randomly stagger renewals)
// so in this case we'll just return the expired certificate
if (expiredCert) { return cb(null, expiredCert); }
// If we get here that means this domain hasn't been registered yet
// Security Warning: you should either manually register domains
// and return null here or check that the sni header isn't being
// spoofed and this is actually a domain you own before registering
//
// cb(null, null);
var hostname = args.domains[0];
console.log("[TODO] check that '" + hostname + "' is one I expect");
args.agreeTos = true;
args.email = 'user@example.com';
le.register(args, cb);
}
});
//
// Express App
//
var app = require('express')();
app.use('/', le.middleware());
//
// HTTP & HTTPS servers
//
require('http').createServer(app).listen(config.plainPort, function () {
console.log('Listening http', this.address());
});
require('https').createServer({
key: config.tlsKey
, cert: config.tlsCert
, SNICallback: le.sniCallback
}, app).listen(config.tlsPort, function () {
console.log('Listening https', this.address());
});

View File

@ -1,93 +0,0 @@
'use strict';
var conf = {
domains: (process.argv[2]||'').split(',')
, email: process.argv[3]
, agree: 'agree' === process.argv[4]
};
var port = 80;
var tlsPort = 5001;
if (!conf.domains || !conf.email || !conf.agree) {
console.error("Usage: node examples/express <domain1,domain2> <email> agree");
console.error("Example: node examples/express example.com,www.example.com user@example.com agree");
return;
}
var LE = require('../');
var path = require('path');
// backend-specific defaults will be passed through
// Note: Since agreeTos is a legal agreement, I would suggest not accepting it by default
var bkDefaults = {
webrootPath: path.join(__dirname, '..', 'tests', 'acme-challenge')
, fullchainTpl: '/live/:hostname/fullchain.pem'
, privkeyTpl: '/live/:hostname/privkey.pem'
, configDir: path.join(__dirname, '..', 'tests', 'letsencrypt.config')
, logsDir: path.join(__dirname, '..', 'tests', 'letsencrypt.logs')
, workDir: path.join(__dirname, '..', 'tests', 'letsencrypt.work')
, server: LE.stagingServer
};
var leBinPath = require('os').homedir() + '/.local/share/letsencrypt/bin/letsencrypt';
var LEB = require('../backends-python');
var backend = LEB.create(leBinPath, bkDefaults, { debug: true });
var le = LE.create(backend, bkDefaults, {
sniRegisterCallback: function (args, certInfo, cb) {
var allowedDomains = conf.domains; // require('../tests/config').allowedDomains;
// let the renewal take place in the background
if (certInfo && certInfo.context) {
cb(null, certInfo);
return;
}
// verify that these are domains we allow to register on our server
if (args.domains.length && args.domains.every(function (hostname) {
hostname = hostname.toLowerCase();
return (-1 !== allowedDomains.indexOf(hostname));
})) {
// wait for registration before responding
args.agreeTos = conf.agree;
args.email = conf.email; // you'd want to lookup which user has this email
le.register(args, cb);
} else {
// I don't know where this error goes (SNICallback)... but at least we put it somewhere
cb(new Error("SNI came in for (an) unrecognized domain(s): '" + args.domains + "'"));
}
}
/*
, setChallenge: function (hostnames, key, value, cb) {
// the python backend needs fs.watch implemented
// before this would work (and even then it would be difficult)
}
, getChallenge: function (hostnames, key, cb) {
//
}
, registrationFailureCallback: function (args, certInfo, cb) {
what to do when a backgrounded registration fails
}
*/
});
var localCerts = require('localhost.daplie.com-certificates');
var express = require('express');
var app = express();
app.use('/', le.middleware());
var server = require('http').createServer();
server.on('request', app);
server.listen(port, function () {
console.log('Listening http', server.address());
});
var tlsServer = require('https').createServer({
key: localCerts.key
, cert: localCerts.cert
, SNICallback: le.sniCallback
});
tlsServer.on('request', app);
tlsServer.listen(tlsPort, function () {
console.log('Listening https', tlsServer.address());
});

661
greenlock.js Normal file
View File

@ -0,0 +1,661 @@
'use strict';
var pkg = require('./package.json');
var ACME = require('@root/acme');
var Greenlock = module.exports;
var request = require('@root/request');
var process = require('process');
var G = Greenlock;
var U = require('./utils.js');
var E = require('./errors.js');
var P = require('./plugins.js');
var A = require('./accounts.js');
var C = require('./certificates.js');
var DIR = require('./lib/directory-url.js');
var ChWrapper = require('./lib/challenges-wrapper.js');
var MngWrapper = require('./lib/manager-wrapper.js');
var UserEvents = require('./user-events.js');
var Init = require('./lib/init.js');
var caches = {};
// { maintainerEmail, directoryUrl, subscriberEmail, store, challenges }
G.create = function(gconf) {
var greenlock = {};
var gdefaults = {};
if (!gconf) {
gconf = {};
}
greenlock._create = function() {
if (!gconf._bin_mode) {
if (!gconf.maintainerEmail) {
throw E.NO_MAINTAINER('create');
}
// TODO send welcome message with benefit info
U._validMx(gconf.maintainerEmail).catch(function() {
console.error(
'invalid maintainer contact info:',
gconf.maintainerEmail
);
// maybe move this to init and don't exit the process, just in case
process.exit(1);
});
}
if ('function' === typeof gconf.notify) {
gdefaults.notify = gconf.notify;
} else {
gdefaults.notify = _notify;
}
gconf = Init._init(gconf);
// OK: /path/to/blah
// OK: npm-name-blah
// NOT OK: ./rel/path/to/blah
// Error: .blah
if ('.' === (gconf.manager.module || '')[0]) {
if (!gconf.packageRoot) {
gconf.packageRoot = process.cwd();
console.warn(
'`packageRoot` not defined, trying ' + gconf.packageRoot
);
}
gconf.manager.module =
gconf.packageRoot + '/' + gconf.manager.module.slice(2);
}
// Wraps each of the following with appropriate error checking
// greenlock.manager.defaults
// greenlock.sites.add
// greenlock.sites.update
// greenlock.sites.remove
// greenlock.sites.find
// greenlock.sites.get
MngWrapper.wrap(greenlock, gconf);
// The goal here is to reduce boilerplate, such as error checking
// and duration parsing, that a manager must implement
greenlock.sites.add = greenlock.add = greenlock.manager.add;
greenlock.sites.update = greenlock.update = greenlock.manager.update;
greenlock.sites.remove = greenlock.remove = greenlock.manager.remove;
// Exports challenges.get for Greenlock Express HTTP-01,
// and whatever odd use case pops up, I suppose
// greenlock.challenges.get
ChWrapper.wrap(greenlock);
DIR._getDefaultDirectoryUrl('', gconf.staging, '');
if (gconf.directoryUrl) {
gdefaults.directoryUrl = gconf.directoryUrl;
}
greenlock._defaults = gdefaults;
greenlock._defaults.debug = gconf.debug;
if (!gconf._bin_mode && false !== gconf.renew) {
// renew every 90-ish minutes (random for staggering)
// the weak setTimeout (unref) means that when run as a CLI process this
// will still finish as expected, and not wait on the timeout
(function renew() {
setTimeout(function() {
greenlock.renew({});
renew();
}, Math.PI * 30 * 60 * 1000).unref();
})();
}
};
// The purpose of init is to make MCONF the source of truth
greenlock._init = function() {
var p;
greenlock._init = function() {
return p;
};
p = greenlock.manager
.init({
request: request
//punycode: require('punycode')
})
.then(async function() {
var MCONF = await greenlock.manager._defaults();
mergeDefaults(MCONF, gconf);
if (true === MCONF.agreeToTerms) {
gdefaults.agreeToTerms = function(tos) {
return Promise.resolve(tos);
};
}
return greenlock.manager._defaults(MCONF);
})
.catch(function(err) {
if ('load_plugin' !== err.context) {
console.error('Fatal error during greenlock init:');
console.error(err.message);
}
if (!gconf._bin_mode) {
process.exit(1);
}
});
return p;
};
greenlock.notify = greenlock._notify = function(ev, params) {
var mng = greenlock.manager;
if ('_' === String(ev)[0]) {
if ('_cert_issue' === ev) {
try {
mng.update({
subject: params.subject,
renewAt: params.renewAt
}).catch(function(e) {
e.context = '_cert_issue';
greenlock._notify('error', e);
});
} catch (e) {
e.context = '_cert_issue';
greenlock._notify('error', e);
}
}
// trap internal events internally
return;
}
try {
var p = greenlock._defaults.notify(ev, params);
if (p && p.catch) {
p.catch(function(e) {
console.error("Promise Rejection on event '" + ev + "':");
console.error(e);
});
}
} catch (e) {
console.error("Thrown Exception on event '" + ev + "':");
console.error(e);
console.error(params);
}
if (-1 !== ['cert_issue', 'cert_renewal'].indexOf(ev)) {
// We will notify all greenlock users of mandatory and security updates
// We'll keep track of versions and os so we can make sure things work well
// { name, version, email, domains, action, communityMember, telemetry }
// TODO look at the other one
UserEvents.notify({
/*
// maintainer should be only on pre-publish, or maybe install, I think
maintainerEmail: greenlock._defaults._maintainerEmail,
name: greenlock._defaults._packageAgent,
version: greenlock._defaults._maintainerPackageVersion,
//action: params.pems._type,
domains: params.altnames,
subscriberEmail: greenlock._defaults._subscriberEmail,
// TODO enable for Greenlock Pro
//customerEmail: args.customerEmail
telemetry: greenlock._defaults.telemetry
*/
});
}
};
// certs.get
greenlock.get = async function(args) {
greenlock._single(args);
args._includePems = true;
var results = await greenlock.renew(args);
if (!results || !results.length) {
// TODO throw an error here?
return null;
}
// just get the first one
var result = results[0];
// (there should be only one, ideally)
if (results.length > 1) {
var err = new Error(
"a search for '" +
args.servername +
"' returned multiple certificates"
);
err.context = 'duplicate_certs';
err.servername = args.servername;
err.subjects = results.map(function(r) {
return (r.site || {}).subject || 'N/A';
});
greenlock._notify('warning', err);
}
if (result.error) {
return Promise.reject(result.error);
}
// site for plugin options, such as http-01 challenge
// pems for the obvious reasons
return result;
};
// TODO remove async here, it doesn't matter
greenlock._single = async function(args) {
if ('string' !== typeof args.servername) {
throw new Error('no `servername` given');
}
// www.example.com => *.example.com
args.wildname =
'*.' +
args.servername
.split('.')
.slice(1)
.join('.');
if (args.wildname.split('.').length < 3) {
// No '*.com'
args.wildname = '';
}
if (
args.servernames ||
//TODO I think we need to block altnames as well, but I don't want to break anything
//args.altnames ||
args.subject ||
args.renewBefore ||
args.issueBefore ||
args.expiresBefore
) {
throw new Error(
'bad arguments, did you mean to call greenlock.renew()?'
);
}
// duplicate, force, and others still allowed
return args;
};
greenlock._config = async function(args) {
greenlock._single(args);
var sites = await greenlock._configAll(args);
return sites[0];
};
greenlock._configAll = async function(args) {
var sites = await greenlock._find(args);
if (!sites || !sites.length) {
return [];
}
sites = JSON.parse(JSON.stringify(sites));
var mconf = await greenlock.manager._defaults();
return sites.map(function(site) {
if (site.store && site.challenges) {
return site;
}
var dconf = site;
// TODO make cli and api mode the same
if (gconf._bin_mode) {
dconf = site.defaults = {};
}
if (!site.store) {
dconf.store = mconf.store;
}
if (!site.challenges) {
dconf.challenges = mconf.challenges;
}
return site;
});
};
// needs to get info about the renewal, such as which store and challenge(s) to use
greenlock.renew = async function(args) {
await greenlock._init();
var mconf = await greenlock.manager._defaults();
return greenlock._renew(mconf, args);
};
greenlock._renew = async function(mconf, args) {
if (!args) {
args = {};
}
var renewedOrFailed = [];
//console.log('greenlock._renew find', args);
var sites = await greenlock._find(args);
// Note: the manager must guarantee that these are mutable copies
//console.log('greenlock._renew found', sites);;
if (!Array.isArray(sites)) {
throw new Error(
'Developer Error: not an array of sites returned from find: ' +
JSON.stringify(sites)
);
}
await (async function next() {
var site = sites.shift();
if (!site) {
return null;
}
var order = { site: site };
renewedOrFailed.push(order);
// TODO merge args + result?
return greenlock
._order(mconf, site)
.then(function(pems) {
if (args._includePems) {
order.pems = pems;
}
})
.catch(function(err) {
order.error = err;
// For greenlock express serialization
err.toJSON = errorToJSON;
err.context = err.context || 'cert_order';
err.subject = site.subject;
if (args.servername) {
err.servername = args.servername;
}
// for debugging, but not to be relied on
err._site = site;
// TODO err.context = err.context || 'renew_certificate'
greenlock._notify('error', err);
})
.then(function() {
return next();
});
})();
return renewedOrFailed;
};
greenlock._acme = async function(mconf, args, dirUrl) {
var packageAgent = gconf.packageAgent || '';
// because Greenlock_Express/v3.x Greenlock/v3 is redundant
if (!/greenlock/i.test(packageAgent)) {
packageAgent = (packageAgent + ' Greenlock/' + pkg.version).trim();
}
var acme = ACME.create({
maintainerEmail: gconf.maintainerEmail,
packageAgent: packageAgent,
notify: greenlock._notify,
debug: greenlock._defaults.debug || args.debug
});
var dir = caches[dirUrl];
// don't cache more than an hour
if (dir && Date.now() - dir.ts < 1 * 60 * 60 * 1000) {
return dir.promise;
}
await acme.init(dirUrl).catch(function(err) {
// TODO this is a special kind of failure mode. What should we do?
console.error(
"[debug] Let's Encrypt may be down for maintenance or `directoryUrl` may be wrong"
);
throw err;
});
caches[dirUrl] = {
promise: Promise.resolve(acme),
ts: Date.now()
};
return acme;
};
greenlock.order = async function(siteConf) {
await greenlock._init();
var mconf = await greenlock.manager._defaults();
return greenlock._order(mconf, siteConf);
};
greenlock._order = async function(mconf, siteConf) {
// packageAgent, maintainerEmail
var dirUrl = DIR._getDirectoryUrl(
siteConf.directoryUrl || mconf.directoryUrl,
siteConf.subject
);
var acme = await greenlock._acme(mconf, siteConf, dirUrl);
var storeConf = siteConf.store || mconf.store;
storeConf = JSON.parse(JSON.stringify(storeConf));
storeConf.packageRoot = gconf.packageRoot;
if (!storeConf.basePath) {
storeConf.basePath = gconf.configDir;
}
if ('.' === (storeConf.basePath || '')[0]) {
if (!gconf.packageRoot) {
gconf.packageRoot = process.cwd();
console.warn(
'`packageRoot` not defined, trying ' + gconf.packageRoot
);
}
storeConf.basePath = require('path').resolve(
gconf.packageRoot || '',
storeConf.basePath
);
}
storeConf.directoryUrl = dirUrl;
var store = await P._loadStore(storeConf);
var account = await A._getOrCreate(
greenlock,
mconf,
store.accounts,
acme,
siteConf
);
var challengeConfs = siteConf.challenges || mconf.challenges;
var challenges = {};
var arr = await Promise.all(
Object.keys(challengeConfs).map(function(typ01) {
return P._loadChallenge(challengeConfs, typ01);
})
);
arr.forEach(function(el) {
challenges[el._type] = el;
});
var pems = await C._getOrOrder(
greenlock,
mconf,
store.certificates,
acme,
challenges,
account,
siteConf
);
if (!pems) {
throw new Error('no order result');
}
if (!pems.privkey) {
throw new Error('missing private key, which is kinda important');
}
return pems;
};
greenlock._create();
return greenlock;
};
G._loadChallenge = P._loadChallenge;
function errorToJSON(e) {
var error = {};
Object.getOwnPropertyNames(e).forEach(function(k) {
error[k] = e[k];
});
return error;
}
function mergeDefaults(MCONF, gconf) {
if (
gconf.agreeToTerms === true ||
MCONF.agreeToTerms === true ||
// TODO deprecate
gconf.agreeTos === true ||
MCONF.agreeTos === true
) {
MCONF.agreeToTerms = true;
}
if (!MCONF.subscriberEmail && gconf.subscriberEmail) {
MCONF.subscriberEmail = gconf.subscriberEmail;
}
// Load the default store module
if (!MCONF.store) {
if (gconf.store) {
MCONF.store = gconf.store;
} else {
MCONF.store = {
module: 'greenlock-store-fs'
};
console.info('[default] store.module: ' + MCONF.store.module);
}
}
/*
if ('greenlock-store-fs' === MCONF.store.module && !MCONF.store.basePath) {
//homedir = require('os').homedir();
if (gconf.configFile) {
MCONF.store.basePath = gconf.configFile.replace(/\.json$/i, '.d');
} else {
MCONF.store.basePath = './greenlock.d';
}
}
*/
// just to test that it loads
P._loadSync(MCONF.store.module);
// Load the default challenge modules
var challenges = MCONF.challenges || gconf.challenges;
if (!challenges) {
challenges = {};
}
if (!challenges['http-01'] && !challenges['dns-01']) {
challenges['http-01'] = { module: 'acme-http-01-standalone' };
console.info(
'[default] challenges.http-01.module: ' +
challenges['http-01'].module
);
}
if (challenges['http-01']) {
if ('string' !== typeof challenges['http-01'].module) {
throw new Error(
'bad challenge http-01 module config:' +
JSON.stringify(challenges['http-01'])
);
}
P._loadSync(challenges['http-01'].module);
}
if (challenges['dns-01']) {
if ('string' !== typeof challenges['dns-01'].module) {
throw new Error(
'bad challenge dns-01 module config' +
JSON.stringify(challenges['dns-01'])
);
}
P._loadSync(challenges['dns-01'].module);
}
MCONF.challenges = challenges;
if (!MCONF.renewOffset) {
MCONF.renewOffset = gconf.renewOffset || '-45d';
console.info('[default] renewOffset: ' + MCONF.renewOffset);
}
if (!MCONF.renewStagger) {
MCONF.renewStagger = gconf.renewStagger || '3d';
console.info('[default] renewStagger: ' + MCONF.renewStagger);
}
var vers = process.versions.node.split('.');
var defaultKeyType = 'EC-P256';
if (+vers[0] < 10 || (+vers[0] === 10 && +vers[1] < 12)) {
defaultKeyType = 'RSA-2048';
}
if (!MCONF.accountKeyType) {
MCONF.accountKeyType = gconf.accountKeyType || defaultKeyType;
console.info('[default] accountKeyType: ' + MCONF.accountKeyType);
}
if (!MCONF.serverKeyType) {
MCONF.serverKeyType = gconf.serverKeyType || 'RSA-2048';
console.info('[default] serverKeyType: ' + MCONF.serverKeyType);
}
if (!MCONF.subscriberEmail && false !== MCONF.subscriberEmail) {
MCONF.subscriberEmail =
gconf.subscriberEmail || gconf.maintainerEmail || undefined;
MCONF.agreeToTerms = gconf.agreeToTerms || undefined;
console.info('');
console.info('[default] subscriberEmail: ' + MCONF.subscriberEmail);
console.info(
'[default] agreeToTerms: ' +
(MCONF.agreeToTerms ||
gconf.agreeToTerms ||
'(show notice on use)')
);
console.info('');
}
}
function _notify(ev, args) {
if (!args) {
args = ev;
ev = args.event;
delete args.event;
}
// TODO define message types
if (!_notify._notice) {
console.info(
'set greenlockOptions.notify to override the default logger'
);
_notify._notice = true;
}
var prefix = 'Warning';
switch (ev) {
case 'error':
prefix = 'Error';
/* falls through */
case 'warning':
console.error(
prefix + '%s:',
(' ' + (args.context || '')).trimRight()
);
console.error(args.message);
if (args.description) {
console.error(args.description);
}
if (args.code) {
console.error('code:', args.code);
}
if (args.stack) {
console.error(args.stack);
}
break;
default:
if (/status/.test(ev)) {
console.info(
ev,
args.altname || args.subject || '',
args.status || ''
);
if (!args.status) {
console.info(args);
}
break;
}
console.info(
ev,
'(more info available: ' + Object.keys(args).join(' ') + ')'
);
}
}
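
For orientation, a minimal end-to-end sketch of the exported API (emails, domains and paths are placeholders; the generated bin/tmpl/*.js files above show the greenlock-express variants):

'use strict';
var pkg = require('./package.json');
var Greenlock = require('@root/greenlock');

var greenlock = Greenlock.create({
    packageRoot: __dirname, // where package.json lives
    configDir: './greenlock.d', // where config and certificates are stored
    packageAgent: pkg.name + '/' + pkg.version,
    maintainerEmail: 'admin@example.com'
});

// register a site, then fetch (ordering or renewing as needed) its certificate
greenlock
    .add({
        subject: 'example.com',
        altnames: ['example.com', 'www.example.com']
    })
    .then(function() {
        return greenlock.get({ servername: 'example.com' });
    })
    .then(function(result) {
        if (result) {
            // result.site holds the config, result.pems holds cert, chain, privkey
            console.info(result.pems.subject, 'expires', result.pems.expiresAt);
        }
    });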

191
greenlockrc.js Normal file
View File

@ -0,0 +1,191 @@
'use strict';
// TODO how to handle path differences when run from npx vs when required by greenlock?
var fs = require('fs');
var path = require('path');
function saveFile(rcpath, data, enc) {
// because this may have a database url or some such
fs.writeFileSync(rcpath, data, enc);
return fs.chmodSync(rcpath, parseInt('0600', 8));
}
var GRC = (module.exports = function(pkgpath, manager, rc) {
// TODO when run from package
// Run from the package root (assumed) or exit
var pkgdir = path.dirname(pkgpath);
try {
require(pkgpath);
} catch (e) {
console.error(
'npx greenlock must be run from the package root (where package.json is)'
);
process.exit(1);
}
try {
return module.exports._defaults(pkgdir, manager, rc);
} catch (e) {
if ('package.json' === e.context) {
console.error(e.desc);
process.exit(1);
}
console.error(e.message);
process.exit(1);
}
});
// Figure out what to do between what's hard-coded,
// what's in the config file, and what's left unset
module.exports.resolve = function(gconf) {
var rc = GRC.read(gconf.packageRoot);
if (gconf.configFile) {
rc = { configFile: gconf.configFile };
}
var manager;
var updates;
if (rc.manager) {
if (gconf.manager && rc.manager !== gconf.manager) {
console.warn(
'warn: ignoring hard-coded ' +
gconf.manager +
' in favor of ' +
rc.manager
);
}
gconf.manager = rc.manager;
} else if (gconf.manager) {
manager = gconf.manager;
}
if (rc.configFile) {
if (gconf.configFile && rc.configFile !== gconf.configFile) {
console.warn(
'warn: ignoring hard-coded ' +
gconf.configFile +
' in favor of ' +
rc.configFile
);
}
gconf.configFile = rc.configFile;
} else if (gconf.configFile) {
updates = { configFile: gconf.configFile };
}
return GRC._defaults(gconf.packageRoot, manager, rc);
};
module.exports._defaults = function(pkgdir, manager, rc) {
var rcpath = path.join(pkgdir, '.greenlockrc');
var _rc;
var created = false;
if (manager) {
if ('.' === manager[0]) {
manager = path.resolve(pkgdir, manager);
}
try {
require(manager);
} catch (e) {
console.error('could not load ' + manager + ' from ' + pkgdir);
throw e;
}
}
var stuff = module.exports._read(pkgdir);
_rc = stuff.rc;
created = stuff.created;
var changed;
if (manager) {
if (!_rc.manager) {
_rc.manager = manager;
}
if (_rc.manager !== manager) {
console.info('Switching manager:');
var older = _rc.manager;
var newer = manager;
if ('/' === older[0]) {
older = path.relative(pkgdir, older);
}
if ('/' === newer[0]) {
newer = path.relative(pkgdir, newer);
}
console.info('\told: ' + older);
console.info('\tnew: ' + newer);
changed = true;
}
}
if (rc) {
changed = true;
Object.keys(rc).forEach(function(k) {
_rc[k] = rc[k];
});
}
if (['@greenlock/manager', 'greenlock-manager-fs'].includes(_rc.manager)) {
if (!_rc.configFile) {
changed = true;
_rc.configFile = path.join(pkgdir, 'greenlock.json');
}
}
if (!changed) {
return _rc;
}
var data = JSON.stringify(_rc, null, 2);
if (created) {
console.info('Wrote ' + rcpath);
}
saveFile(rcpath, data, 'utf8');
return _rc;
};
module.exports.read = function(pkgdir) {
return module.exports._read(pkgdir).rc;
};
module.exports._read = function(pkgdir) {
var created;
var rcpath = path.join(pkgdir, '.greenlockrc');
var _data;
try {
_data = fs.readFileSync(rcpath, 'utf8');
} catch (err) {
if ('ENOENT' !== err.code) {
throw err;
}
try {
require(path.resolve(path.join(pkgdir, './package.json')));
} catch (e) {
e.context = 'package.json';
e.desc =
'run `greenlock` from the same directory as `package.json`, or specify `packageRoot` of `.greenlockrc`';
throw e;
}
console.info('Creating ' + rcpath);
created = true;
_data = '{}';
saveFile(rcpath, _data, 'utf8');
}
var rc;
try {
rc = JSON.parse(_data);
} catch (e) {
console.error("couldn't parse " + rcpath, _data);
console.error('(perhaps you should just delete it and try again?)');
process.exit(1);
}
return {
created: created,
rc: rc
};
};
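
A rough sketch of driving the helper above from a script (the manager name is the default one from package.json; paths resolve against the package root):

```js
'use strict';

var greenlockrc = require('./greenlockrc.js');

var rc = greenlockrc.resolve({
    packageRoot: __dirname,
    manager: '@greenlock/manager'
});
// for the stock managers this typically resolves to something like
// { manager: '@greenlock/manager', configFile: '<packageRoot>/greenlock.json' }
console.info(rc);
```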

303
index.js
View File

@ -1,303 +0,0 @@
'use strict';
// TODO handle www and no-www together somehow?
var PromiseA = require('bluebird');
var crypto = require('crypto');
var tls = require('tls');
var LE = module.exports;
LE.liveServer = "https://acme-v01.api.letsencrypt.org/directory";
LE.stagingServer = "https://acme-staging.api.letsencrypt.org/directory";
LE.merge = function merge(defaults, args) {
var copy = {};
Object.keys(defaults).forEach(function (key) {
copy[key] = defaults[key];
});
Object.keys(args).forEach(function (key) {
copy[key] = args[key];
});
return copy;
};
LE.create = function (backend, defaults, handlers) {
if (!handlers) { handlers = {}; }
if (!handlers.lifetime) { handlers.lifetime = 90 * 24 * 60 * 60 * 1000; }
if (!handlers.renewWithin) { handlers.renewWithin = 3 * 24 * 60 * 60 * 1000; }
if (!handlers.memorizeFor) { handlers.memorizeFor = 1 * 24 * 60 * 60 * 1000; }
if (!handlers.sniRegisterCallback) {
handlers.sniRegisterCallback = function (args, cache, cb) {
// TODO when we have ECDSA, just do this automatically
cb(null, null);
};
}
backend = PromiseA.promisifyAll(backend);
var utils = require('./utils');
//var attempts = {}; // should exist in master process only
var ipc = {}; // in-process cache
var le;
// TODO check certs on initial load
// TODO expect that certs expire every 90 days
// TODO check certs with setInterval?
//options.cacheContextsFor = options.cacheContextsFor || (1 * 60 * 60 * 1000);
function sniCallback(hostname, cb) {
var args = LE.merge(defaults, {});
args.domains = [hostname];
le.fetch(args, function (err, cache) {
if (err) {
cb(err);
return;
}
// vazhdo is Albanian for 'continue'
function vazhdo(err, c2) {
if (err) {
cb(err);
return;
}
cache = c2 || cache;
if (!cache.context) {
cache.context = tls.createSecureContext({
key: cache.key // privkey.pem
, cert: cache.cert // fullchain.pem
//, ciphers // node's defaults are great
});
}
cb(null, cache.context);
}
if (cache) {
vazhdo();
return;
}
var args = LE.merge(defaults, { domains: [hostname] });
handlers.sniRegisterCallback(args, cache, vazhdo);
});
}
le = {
validate: function (hostnames, cb) {
// TODO check dns, etc
if ((!hostnames.length && hostnames.every(le.isValidDomain))) {
cb(new Error("node-letsencrypt: invalid hostnames: " + hostnames.join(',')));
return;
}
//
// IMPORTANT
//
// Before attempting a dynamic registration you need to validate that
//
// * these are hostnames that you expected to exist on the system
// * their A records currently point to this ip
// * this system's ip hasn't changed
//
// If you do not check these things, then someone could attack you
// and cause you, in return, to have your ip be rate-limit blocked
//
console.warn("[SECURITY WARNING]: node-letsencrypt: validate(hostnames, cb) NOT IMPLEMENTED");
cb(null, true);
}
, middleware: function () {
//console.log('[DEBUG] webrootPath', defaults.webrootPath);
var serveStatic = require('serve-static')(defaults.webrootPath, { dotfiles: 'allow' });
var prefix = '/.well-known/acme-challenge/';
return function (req, res, next) {
if (0 !== req.url.indexOf(prefix)) {
next();
return;
}
serveStatic(req, res, next);
};
}
, SNICallback: sniCallback
, sniCallback: sniCallback
, _registerHelper: function (args, cb) {
var copy = LE.merge(defaults, args);
var err;
if (!utils.isValidDomain(args.domains[0])) {
err = new Error("invalid domain");
err.code = "INVALID_DOMAIN";
cb(err);
return;
}
return le.validate(args.domains, function (err) {
if (err) {
cb(err);
return;
}
console.log("[NLE]: begin registration");
return backend.registerAsync(copy).then(function () {
console.log("[NLE]: end registration");
// calls fetch because fetch calls cacheCertInfo
return le.fetch(args, cb);
}, cb);
});
}
, _fetchHelper: function (args, cb) {
return backend.fetchAsync(args).then(function (certInfo) {
if (!certInfo) {
cb(null, null);
return;
}
var now = Date.now();
// key, cert, issuedAt, lifetime, expiresAt
if (!certInfo.expiresAt) {
certInfo.expiresAt = certInfo.issuedAt + (certInfo.lifetime || handlers.lifetime);
}
if (!certInfo.lifetime) {
certInfo.lifetime = (certInfo.lifetime || handlers.lifetime);
}
// a pretty good hard buffer
certInfo.expiresAt -= (1 * 24 * 60 * 60 * 100);
certInfo = LE.cacheCertInfo(args, certInfo, ipc, handlers);
if (now > certInfo.bestIfUsedBy && !certInfo.timeout) {
// EXPIRING
if (now > certInfo.expiresAt) {
// EXPIRED
certInfo.renewTimeout = Math.floor(certInfo.renewTimeout / 2);
}
certInfo.timeout = setTimeout(function () {
le.register(args, cb);
}, certInfo.renewTimeout);
}
cb(null, certInfo.context);
}, cb);
}
, fetch: function (args, cb) {
var hostname = args.domains[0];
// TODO don't call now() every time because this is hot code
var now = Date.now();
var certInfo = ipc[hostname];
// TODO once ECDSA is available, wait for cert renewal if its due
if (certInfo) {
if (now > certInfo.bestIfUsedBy && !certInfo.timeout) {
// EXPIRING
if (now > certInfo.expiresAt) {
// EXPIRED
certInfo.renewTimeout = Math.floor(certInfo.renewTimeout / 2);
}
certInfo.timeout = setTimeout(function () {
le.register(args, cb);
}, certInfo.renewTimeout);
}
cb(null, certInfo.context);
if ((now - certInfo.loadedAt) < (certInfo.memorizeFor)) {
// these aren't stale, so don't fall through
return;
}
}
le._fetchHelper(args, cb);
}
, register: function (args, cb) {
// this may be run in a cluster environment
// in that case it should NOT check the cache
// but ensure that it has the most fresh copy
// before attempting a renew
le._fetchHelper(args, function (err, hit) {
var hostname = args.domains[0];
if (err) {
cb(err);
return;
}
else if (hit) {
cb(null, hit);
return;
}
return le._registerHelper(args, function (err) {
if (err) {
cb(err);
return;
}
le._fetchHelper(args, function (err, cache) {
if (cache) {
cb(null, cache.context);
return;
}
// still couldn't read the certs after success... that's weird
cb(err, null);
});
}, function (err) {
console.error("[Error] Let's Encrypt failed:");
console.error(err.stack || new Error(err.message || err.toString()).stack);
// wasn't successful with lets encrypt, don't automatically try again for 12 hours
// TODO what's the better way to handle this?
// failure callback?
ipc[hostname] = {
context: null // TODO default context
, issuedAt: Date.now()
, lifetime: (12 * 60 * 60 * 1000)
// , expiresAt: generated in next step
};
cb(err, ipc[hostname]);
});
});
}
};
return le;
};
LE.cacheCertInfo = function (args, certInfo, ipc, handlers) {
// TODO IPC via process and worker to guarantee no races
// rather than just "really good odds"
var hostname = args.domains[0];
var now = Date.now();
// Stagger randomly by plus 0% to 25% to prevent all caches expiring at once
var rnd1 = (crypto.randomBytes(1)[0] / 255);
var memorizeFor = Math.floor(handlers.memorizeFor + ((handlers.memorizeFor / 4) * rnd1));
// Stagger randomly to renew between n and 2n days before renewal is due
// this *greatly* reduces the risk of multiple cluster processes renewing the same domain at once
var rnd2 = (crypto.randomBytes(1)[0] / 255);
var bestIfUsedBy = certInfo.expiresAt - (handlers.renewWithin + Math.floor(handlers.renewWithin * rnd2));
// Stagger randomly by plus 0 to 5 min to reduce risk of multiple cluster processes
// renewing at once on boot when the certs have expired
var rnd3 = (crypto.randomBytes(1)[0] / 255);
var renewTimeout = Math.floor((5 * 60 * 1000) * rnd3);
certInfo.context = tls.createSecureContext({
key: certInfo.key
, cert: certInfo.cert
//, ciphers // node's defaults are great
});
certInfo.loadedAt = now;
certInfo.memorizeFor = memorizeFor;
certInfo.bestIfUsedBy = bestIfUsedBy;
certInfo.renewTimeout = renewTimeout;
ipc[hostname] = certInfo;
return ipc[hostname];
};

88
lib/challenges-wrapper.js Normal file
View File

@ -0,0 +1,88 @@
'use strict';
var Greenlock = require('../');
module.exports.wrap = function(greenlock) {
greenlock.challenges = {};
greenlock.challenges.get = async function(chall) {
// TODO pick one and warn on the others
// (just here due to some backwards compat issues with early v3 plugins)
var servername =
chall.servername ||
chall.altname ||
(chall.identifier && chall.identifier.value);
// TODO some sort of caching to prevent database hits?
var site = await greenlock._config({ servername: servername });
if (!site) {
return null;
}
// Hmm... this _should_ be impossible
if (!site.challenges || !site.challenges['http-01']) {
var copy = JSON.parse(JSON.stringify(site));
sanitizeCopiedConf(copy);
sanitizeCopiedConf(copy.store);
if (site.challenges) {
sanitizeCopiedConf(copy.challenges['http-01']);
sanitizeCopiedConf(copy.challenges['dns-01']);
sanitizeCopiedConf(copy.challenges['tls-alpn-01']);
}
console.warn('[Bug] Please report this error:');
console.warn(
'\terror: http-01 challenge requested, but not even a default http-01 config exists'
);
console.warn('\tservername:', JSON.stringify(servername));
console.warn('\tsite:', JSON.stringify(copy));
return null;
}
var plugin = await Greenlock._loadChallenge(site.challenges, 'http-01');
if (!plugin) {
return null;
}
var keyAuth;
var keyAuthDigest;
var result = await plugin.get({
challenge: {
type: chall.type,
//hostname: chall.servername,
altname: chall.servername,
identifier: { value: chall.servername },
token: chall.token
}
});
if (result) {
// backwards compat that shouldn't be dropped
// because new v3 modules had to do this to be
// backwards compatible with Greenlock v2.7 at
// the time.
if (result.challenge) {
result = result.challenge;
}
keyAuth = result.keyAuthorization;
keyAuthDigest = result.keyAuthorizationDigest;
}
if (/dns/.test(chall.type)) {
return { keyAuthorizationDigest: keyAuthDigest };
}
return { keyAuthorization: keyAuth };
};
};
function sanitizeCopiedConf(copy) {
if (!copy) {
return;
}
Object.keys(copy).forEach(function(k) {
if (/(api|key|token)/i.test(k) && 'string' === typeof copy[k]) {
copy[k] = '**redacted**';
}
});
return copy;
}
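
`greenlock.challenges.get()` above is what an http-01 responder would call. A sketch of such a responder as a connect/express-style middleware factory (the route shape is an assumption, not part of this module):

```js
'use strict';

var prefix = '/.well-known/acme-challenge/';

function createAcmeResponder(greenlock) {
    return async function(req, res, next) {
        if (0 !== req.url.indexOf(prefix)) {
            return next();
        }
        var token = req.url.slice(prefix.length);
        var result = await greenlock.challenges.get({
            type: 'http-01',
            servername: (req.headers.host || '').split(':')[0],
            token: token
        });
        if (!result || !result.keyAuthorization) {
            res.statusCode = 404;
            return res.end('Not Found');
        }
        res.end(result.keyAuthorization);
    };
}

module.exports = createAcmeResponder;
```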

46
lib/directory-url.js Normal file
View File

@ -0,0 +1,46 @@
var DIR = module.exports;
// This will ALWAYS print out a notice if the URL is clearly a staging URL
DIR._getDirectoryUrl = function(dirUrl, domain) {
var liveUrl = 'https://acme-v02.api.letsencrypt.org/directory';
dirUrl = DIR._getDefaultDirectoryUrl(dirUrl, '', domain);
if (!dirUrl) {
dirUrl = liveUrl;
// This will print out a notice (just once) if no directoryUrl has been supplied
if (!DIR._shownDirectoryUrl) {
DIR._shownDirectoryUrl = true;
console.info('ACME Directory URL:', dirUrl);
}
}
return dirUrl;
};
// Handle staging URLs, pebble test server, etc
DIR._getDefaultDirectoryUrl = function(dirUrl, staging, domain) {
var stagingUrl = 'https://acme-staging-v02.api.letsencrypt.org/directory';
var stagingRe = /(^http:|staging|^127\.0\.|^::|localhost)/;
var env = '';
var args = [];
if ('undefined' !== typeof process) {
env = (process.env && process.env.ENV) || '';
args = (process.argv && process.argv.slice(1)) || [];
}
if (
staging ||
stagingRe.test(dirUrl) ||
args.includes('--staging') ||
/DEV|STAG/i.test(env)
) {
if (!stagingRe.test(dirUrl)) {
dirUrl = stagingUrl;
}
console.info('[staging] ACME Staging Directory URL:', dirUrl, env);
console.warn('FAKE CERTIFICATES (for testing) only', env, domain);
console.warn('');
}
return dirUrl;
};
DIR._shownDirectoryUrl = false;
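
A quick sketch of how the two helpers above behave (module path and domain are illustrative):

```js
'use strict';

var DIR = require('./lib/directory-url.js');

// with no URL and no staging hints this falls back to the live directory
// and prints the one-time "ACME Directory URL" notice
console.info(DIR._getDirectoryUrl(null, 'example.com'));
// https://acme-v02.api.letsencrypt.org/directory

// anything staging-ish (http://, localhost, 127.x, --staging, ENV=DEV/STAG)
// switches to the staging directory and warns about fake certificates
console.info(DIR._getDefaultDirectoryUrl('', true, 'example.com'));
// https://acme-staging-v02.api.letsencrypt.org/directory
```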

194
lib/init.js Normal file
View File

@ -0,0 +1,194 @@
'use strict';
var Init = module.exports;
var fs = require('fs');
var path = require('path');
//var promisify = require("util").promisify;
Init._init = function(opts) {
//var Rc = require("@root/greenlock/rc");
var Rc = require('./rc.js');
var pkgText;
var pkgErr;
var msgErr;
//var emailErr;
var realPkg;
var userPkg;
var myPkg = {};
// we want to be SUPER transparent that we're reading from package.json
// we don't want anything unexpected
var implicitConfig = [];
var rc;
if (opts.packageRoot) {
try {
pkgText = fs.readFileSync(
path.resolve(opts.packageRoot, 'package.json'),
'utf8'
);
opts._hasPackage = true;
} catch (e) {
pkgErr = e;
if (opts._mustPackage) {
console.error(
'Should be run from package root (the same directory as `package.json`)'
);
process.exit(1);
return;
}
console.warn(
'`packageRoot` should be the root of the package (probably `__dirname`)'
);
}
}
if (pkgText) {
try {
realPkg = JSON.parse(pkgText);
} catch (e) {
pkgErr = e;
}
}
userPkg = opts.package;
if (realPkg || userPkg) {
userPkg = userPkg || {};
realPkg = realPkg || {};
// build package agent
if (!opts.packageAgent) {
// name
myPkg.name = userPkg.name;
if (!myPkg.name) {
myPkg.name = realPkg.name;
implicitConfig.push('name');
}
// version
myPkg.version = userPkg.version;
if (!myPkg.version) {
myPkg.version = realPkg.version;
implicitConfig.push('version');
}
if (myPkg.name && myPkg.version) {
opts.packageAgent = myPkg.name + '/' + myPkg.version;
}
}
// build author
myPkg.author = opts.maintainerEmail;
if (!myPkg.author) {
myPkg.author =
(userPkg.author && userPkg.author.email) || userPkg.author;
}
if (!myPkg.author) {
implicitConfig.push('author');
myPkg.author =
(realPkg.author && realPkg.author.email) || realPkg.author;
}
if (!opts._init) {
opts.maintainerEmail = myPkg.author;
}
}
if (!opts.packageAgent) {
msgErr =
'missing `packageAgent` and also failed to read `name` and/or `version` from `package.json`';
if (pkgErr) {
msgErr += ': ' + pkgErr.message;
}
throw new Error(msgErr);
}
if (!opts._init) {
opts.maintainerEmail = parseMaintainer(opts.maintainerEmail);
if (!opts.maintainerEmail) {
msgErr =
'missing or malformed `maintainerEmail` (or `author` from `package.json`), which is used as the contact for support notices';
throw new Error(msgErr);
}
}
if (opts.packageRoot) {
// Place the rc file in the packageroot
rc = Rc._initSync(opts.packageRoot, opts.manager, opts.configDir);
opts.configDir = rc.configDir;
opts.manager = rc.manager;
}
if (!opts.configDir && !opts.manager) {
throw new Error(
'missing `packageRoot` and `configDir`, but no `manager` was supplied'
);
}
opts.configFile = path.join(
path.resolve(opts.packageRoot, opts.configDir),
'config.json'
);
var config;
try {
config = JSON.parse(fs.readFileSync(opts.configFile));
} catch (e) {
if ('ENOENT' !== e.code) {
throw e;
}
config = { defaults: {} };
}
opts.manager =
rc.manager ||
(config.defaults && config.defaults.manager) ||
config.manager;
if (!opts.manager) {
opts.manager = '@greenlock/manager';
}
if ('string' === typeof opts.manager) {
opts.manager = {
module: opts.manager
};
}
opts.manager = JSON.parse(JSON.stringify(opts.manager));
var confconf = ['configDir', 'configFile', 'staging', 'directoryUrl'];
Object.keys(opts).forEach(function(k) {
if (!confconf.includes(k)) {
return;
}
if ('undefined' !== typeof opts.manager[k]) {
return;
}
opts.manager[k] = opts[k];
});
/*
var ignore = ["packageRoot", "maintainerEmail", "packageAgent", "staging", "directoryUrl", "manager"];
Object.keys(opts).forEach(function(k) {
if (ignore.includes(k)) {
return;
}
opts.manager[k] = opts[k];
});
*/
// Place the rc file in the configDir itself
//Rc._initSync(opts.configDir, opts.configDir);
return opts;
};
// ex: "John Doe <john@example.com> (https://john.doe)"
// ex: "John Doe <john@example.com>"
// ex: "<john@example.com>"
// ex: "john@example.com"
var looseEmailRe = /(^|[\s<])([^'" <>:;`]+@[^'" <>:;`]+\.[^'" <>:;`]+)/;
function parseMaintainer(maintainerEmail) {
try {
maintainerEmail = maintainerEmail.match(looseEmailRe)[2];
} catch (e) {
maintainerEmail = null;
}
return maintainerEmail;
}
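
A sketch of the options `Init._init()` reads. Only the option names are taken from the code above; the values are placeholders:

```js
'use strict';

var Greenlock = require('@root/greenlock');

Greenlock.create({
    // lets packageAgent ("name/version") and maintainerEmail ("author")
    // be derived from ./package.json
    packageRoot: __dirname,

    // explicit overrides for what would otherwise be derived
    packageAgent: 'my-app/1.0.0',
    maintainerEmail: 'admin@example.com',

    // where .greenlockrc points and where config.json is kept
    configDir: './greenlock.d'
});
```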

645
lib/manager-wrapper.js Normal file
View File

@ -0,0 +1,645 @@
'use strict';
var U = require('../utils.js');
var E = require('../errors.js');
var warned = {};
// The purpose of this file is to try to auto-build
// partial managers so that the external API can be smaller.
module.exports.wrap = function(greenlock, gconf) {
var myFind = gconf.find;
delete gconf.find;
var mega = mergeManager(greenlock, gconf);
greenlock.manager = {};
greenlock.sites = {};
//greenlock.accounts = {};
//greenlock.certs = {};
greenlock.manager._modulename = gconf.manager.module;
if ('/' === String(gconf.manager.module)[0]) {
greenlock.manager._modulename = require('path').relative(
gconf.packageRoot,
greenlock.manager._modulename
);
if ('.' !== String(greenlock.manager._modulename)[0]) {
greenlock.manager._modulename =
'./' + greenlock.manager._modulename;
}
}
var allowed = [
'accountKeyType', //: ["P-256", "RSA-2048"],
'serverKeyType', //: ["RSA-2048", "P-256"],
'store', // : { module, specific opts },
'challenges', // : { "http-01", "dns-01", "tls-alpn-01" },
'subscriberEmail',
'agreeToTerms',
'agreeTos',
'customerEmail',
'renewOffset',
'renewStagger',
'module', // not allowed, just ignored
'manager'
];
// get / set default site settings such as
// subscriberEmail, store, challenges, renewOffset, renewStagger
greenlock.manager.defaults = function(conf) {
return greenlock._init().then(function() {
if (!conf) {
return mega.defaults();
}
if (conf.sites) {
throw new Error('cannot set sites as global config');
}
if (conf.routes) {
throw new Error('cannot set routes as global config');
}
// disallow keys we know to be bad
[
'subject',
'deletedAt',
'altnames',
'lastAttemptAt',
'expiresAt',
'issuedAt',
'renewAt',
'sites',
'routes'
].some(function(k) {
if (k in conf) {
throw new Error(
'`' + k + '` not allowed as a default setting'
);
}
});
Object.keys(conf).forEach(function(k) {
if (!allowed.includes(k) && !warned[k]) {
warned[k] = true;
console.warn(
k +
" isn't a known key. Please open an issue and let us know the use case."
);
}
});
Object.keys(conf).forEach(function(k) {
if (-1 !== ['module', 'manager'].indexOf(k)) {
return;
}
if ('undefined' === typeof k) {
throw new Error(
"'" +
k +
"' should be set to a value, or `null`, but not left `undefined`"
);
}
});
return mega.defaults(conf);
});
};
greenlock.manager._defaults = function(opts) {
return mega.defaults(opts);
};
greenlock.manager.add = function(args) {
if (!args || !Array.isArray(args.altnames) || !args.altnames.length) {
throw new Error(
'you must specify `altnames` when adding a new site'
);
}
if (args.renewAt) {
throw new Error(
'you cannot specify `renewAt` when adding a new site'
);
}
return greenlock.manager.set(args);
};
// TODO agreeToTerms should be handled somewhere... maybe?
// Add and update remains because I said I had locked the API
greenlock.manager.set = greenlock.manager.update = function(args) {
return greenlock._init().then(function() {
// The goal is to make this decently easy to manage by hand without mistakes
// but also reasonably easy to error check and correct
// and to make deterministic auto-corrections
args.subject = checkSubject(args);
//var subscriberEmail = args.subscriberEmail;
// TODO shortcut the other array checks when not necessary
if (Array.isArray(args.altnames)) {
args.altnames = checkAltnames(args.subject, args);
}
// at this point we know that subject is the first of altnames
return Promise.all(
(args.altnames || []).map(function(d) {
d = d.replace('*.', '');
return U._validDomain(d);
})
).then(function() {
if (!U._uniqueNames(args.altnames || [])) {
throw E.NOT_UNIQUE(
'add',
"'" + args.altnames.join("' '") + "'"
);
}
// durations
if (args.renewOffset) {
args.renewOffset = U._parseDuration(args.renewOffset);
}
if (args.renewStagger) {
args.renewStagger = U._parseDuration(args.renewStagger);
}
return mega.set(args).then(function(result) {
if (!gconf._bin_mode) {
greenlock.renew({}).catch(function(err) {
if (!err.context) {
err.context = 'renew';
}
greenlock._notify('error', err);
});
}
return result;
});
});
});
};
greenlock.manager.get = greenlock.sites.get = function(args) {
return Promise.resolve().then(function() {
if (args.subject) {
throw new Error(
'get({ servername }) searches certificates by altnames, not by subject specifically'
);
}
if (args.servernames || args.altnames || args.renewBefore) {
throw new Error(
'get({ servername }) does not take arguments that could lead to multiple results'
);
}
return mega.get(args);
});
};
greenlock.manager.remove = function(args) {
return Promise.resolve().then(function() {
args.subject = checkSubject(args);
if (args.servername) {
throw new Error(
'remove() should be called with `subject` only, if you wish to remove altnames use `update()`'
);
}
if (args.altnames) {
throw new Error(
'remove() should be called with `subject` only, not `altnames`'
);
}
// TODO check no altnames
return mega.remove(args);
});
};
/*
{
subject: site.subject,
altnames: site.altnames,
//issuedAt: site.issuedAt,
//expiresAt: site.expiresAt,
renewOffset: site.renewOffset,
renewStagger: site.renewStagger,
renewAt: site.renewAt,
subscriberEmail: site.subscriberEmail,
customerEmail: site.customerEmail,
challenges: site.challenges,
store: site.store
};
*/
// no transaction promise here because it calls set
greenlock._find = async function(args) {
args = _mangleFindArgs(args);
var ours = await mega.find(args);
if (!myFind) {
return ours;
}
// if the user has an overlay find function we'll do a diff
// between the managed state and the overlay, and choose
// what was found.
var theirs = await myFind(args);
theirs = theirs.filter(function(site) {
if (!site || 'string' !== typeof site.subject) {
throw new Error('found site is missing subject');
}
if (
!Array.isArray(site.altnames) ||
!site.altnames.length ||
!site.altnames[0] ||
site.altnames[0] !== site.subject
) {
throw new Error('missing or malformed altnames');
}
['renewAt', 'issuedAt', 'expiresAt'].forEach(function(k) {
if (site[k]) {
throw new Error(
'`' +
k +
'` should be updated by `set()`, not by `find()`'
);
}
});
if (!site) {
return;
}
if (args.subject && site.subject !== args.subject) {
return false;
}
var servernames = args.servernames || args.altnames;
if (
servernames &&
!site.altnames.some(function(altname) {
return servernames.includes(altname);
})
) {
return false;
}
return site.renewAt < (args.renewBefore || Infinity);
});
return _mergeFind(ours, theirs);
};
function _mergeFind(ours, theirs) {
var toUpdate = [];
theirs.forEach(function(_newer) {
var hasCurrent = ours.some(function(_older) {
var changed = false;
if (_newer.subject !== _older.subject) {
return false;
}
// BE SURE TO SET THIS UNDEFINED AFTERWARDS
_older._exists = true;
_newer.deletedAt = _newer.deletedAt || 0;
Object.keys(_newer).forEach(function(k) {
if (_older[k] !== _newer[k]) {
changed = true;
_older[k] = _newer[k];
}
});
if (changed) {
toUpdate.push(_older);
}
// handled the (only) match
return true;
});
if (!hasCurrent) {
toUpdate.push(_newer);
}
});
// delete the things that are gone
ours.forEach(function(_older) {
if (!_older._exists) {
_older.deletedAt = Date.now();
toUpdate.push(_older);
}
_older._exists = undefined;
});
Promise.all(
toUpdate.map(function(site) {
return greenlock.sites.update(site).catch(function(err) {
console.error(
'Developer Error: cannot update sites from user-supplied `find()`:'
);
console.error(err);
});
})
);
// ours is updated from theirs
return ours;
}
greenlock.manager.init = mega.init;
};
function checkSubject(args) {
if (!args || !args.subject) {
throw new Error('you must specify `subject` when configuring a site');
}
/*
if (!args.subject) {
throw E.NO_SUBJECT('add');
}
*/
var subject = (args.subject || '').toLowerCase();
if (subject !== args.subject) {
console.warn('`subject` must be lowercase', args.subject);
}
return U._encodeName(subject);
}
function checkAltnames(subject, args) {
// the things we have to check and get right
var altnames = (args.altnames || []).map(function(name) {
return String(name || '').toLowerCase();
});
// punycode BEFORE validation
// (set, find, remove)
if (altnames.join() !== args.altnames.join()) {
console.warn(
'all domains in `altnames` must be lowercase:',
args.altnames
);
}
args.altnames = args.altnames.map(U._encodeName);
if (
!args.altnames.every(function(d) {
return U._validName(d);
})
) {
throw E.INVALID_HOSTNAME('add', "'" + args.altnames.join("' '") + "'");
}
if (subject && subject !== args.altnames[0]) {
throw E.BAD_ORDER(
'add',
'(' + args.subject + ") '" + args.altnames.join("' '") + "'"
);
}
/*
if (subject && subject !== altnames[0]) {
throw new Error(
'`subject` must be the first domain in `altnames`',
args.subject,
altnames.join(' ')
);
}
*/
return altnames;
}
function loadManager(gconf) {
var m;
// 1. Get the manager
// 2. Figure out if we need to wrap it
/*
if (!gconf.manager) {
gconf.manager = '@greenlock/manager';
}
if ('string' !== typeof gconf.manager) {
throw new Error(
'`manager` should be a string representing the npm name or file path of the module'
);
}
*/
try {
// wrap this to be safe for @greenlock/manager
m = require(gconf.manager.module).create(gconf.manager);
} catch (e) {
console.error('Error loading manager:');
console.error(e.code);
console.error(e.message);
}
if (!m) {
console.error();
console.error(
'Failed to load manager plugin ',
JSON.stringify(gconf.manager)
);
console.error();
process.exit(1);
}
return m;
}
function mergeManager(greenlock, gconf) {
var mng;
function m() {
if (mng) {
return mng;
}
mng = require('@greenlock/manager').create(gconf);
return mng;
}
var mini = loadManager(gconf);
var mega = {};
// optional
if (mini.defaults) {
mega.defaults = function(opts) {
return mini.defaults(opts);
};
} else {
mega.defaults = m().defaults;
}
// optional
if (mini.remove) {
mega.remove = function(opts) {
return mini.remove(opts);
};
} else {
mega.remove = function(opts) {
return mega.get(opts).then(function(site) {
if (!site) {
return null;
}
site.deletedAt = Date.now();
return mega.set(site).then(function() {
return site;
});
});
};
}
if (mini.find) {
// without this there cannot be fully automatic renewal
mega.find = function(opts) {
return mini.find(opts);
};
}
// set and (find and/or get) should be from the same set
if (mini.set) {
mega.set = function(opts) {
if (!mini.find) {
// TODO create the list so that find can be implemented
}
return mini.set(opts);
};
} else {
mega.set = m().set;
mega.get = m().get;
}
if (mini.get) {
mega.get = async function(opts) {
if (mini.set) {
return mini.get(opts);
}
if (!mega._get) {
mega._get = m().get;
}
var existing = await mega._get(opts);
var site = await mini.get(opts);
if (!existing) {
// Add
if (!site) {
return;
}
site.renewAt = 1;
site.deletedAt = 0;
await mega.set(site);
existing = await mega._get(opts);
} else if (!site) {
// Delete
existing.deletedAt = site.deletedAt || Date.now();
await mega.set(existing);
existing = null;
} else if (
site.subject !== existing.subject ||
site.altnames.join(' ') !== existing.altnames.join(' ')
) {
// Update
site.renewAt = 1;
site.deletedAt = 0;
await mega.set(site);
existing = await mega._get(opts);
if (!existing) {
throw new Error('failed to `get` after `set`');
}
}
return existing;
};
} else if (mini.find) {
mega.get = function(opts) {
var servername = opts.servername;
delete opts.servername;
opts.servernames = (servername && [servername]) || undefined;
return mini.find(opts).then(function(sites) {
return sites.filter(function(site) {
return site.altnames.includes(servername);
})[0];
});
};
} else if (mini.set) {
throw new Error(
gconf.manager.module +
' implements `set()`, but not `get()` or `find()`'
);
} else {
mega.find = m().find;
mega.get = m().get;
}
if (!mega.find) {
mega._nofind = false;
mega.find = async function(opts) {
if (!mega._nofind) {
console.warn(
'Warning: manager `' +
greenlock.manager._modulename +
'` does not implement `find({})`\n'
);
mega._nofind = true;
}
return [];
};
}
if (!mega.get) {
mega.get = function(opts) {
var servername = opts.servername;
delete opts.servername;
opts.servernames = (servername && [servername]) || undefined;
return mega.find(opts).then(function(sites) {
return sites.filter(function(site) {
return site.altnames.includes(servername);
})[0];
});
};
}
mega.init = function(deps) {
if (mini.init) {
return mini.init(deps).then(function() {
if (mng) {
return mng.init(deps);
}
});
} else if (mng) {
return mng.init(deps);
} else {
return Promise.resolve(null);
}
};
return mega;
}
function _mangleFindArgs(args) {
var servernames = (args.servernames || [])
.concat(args.altnames || [])
.filter(Boolean)
.slice(0);
var modified = servernames.slice(0);
// servername, wildname, and altnames are all the same
['wildname', 'servername'].forEach(function(k) {
var altname = args[k] || '';
if (altname && !modified.includes(altname)) {
modified.push(altname);
}
});
if (modified.length) {
servernames = modified;
servernames = servernames.map(U._encodeName);
args.altnames = servernames;
args.servernames = args.altnames = checkAltnames(false, args);
}
// documented as args.servernames
// preserved as args.altnames for v3 beta backwards compat
// my only hesitancy in this choice is that a "servername"
// may NOT contain '*.', in which case `altnames` is a better choice.
// However, `altnames` is ambiguous - as if it means to find a
// certificate by that specific collection of altnames.
// ... perhaps `domains` could work?
return args;
}
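
A sketch of the wrapped manager API that this file exposes, assuming an already-created `greenlock` instance (domains and email are placeholders):

```js
'use strict';

async function manageSites(greenlock) {
    // global defaults: subscriberEmail, agreeToTerms, challenges, store, ...
    await greenlock.manager.defaults({
        agreeToTerms: true,
        subscriberEmail: 'admin@example.com'
    });

    // add() requires altnames (subject must be the first altname)
    await greenlock.manager.add({
        subject: 'example.com',
        altnames: ['example.com', 'www.example.com']
    });

    // get() looks up a single certificate by servername
    var site = await greenlock.manager.get({ servername: 'www.example.com' });
    console.info(site && site.subject);

    // remove() takes the subject only
    await greenlock.manager.remove({ subject: 'example.com' });
}

module.exports = manageSites;
```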

77
lib/rc.js Normal file
View File

@ -0,0 +1,77 @@
'use strict';
var Rc = module.exports;
var fs = require('fs');
var path = require('path');
// This is only called if packageRoot is specified
// (which it should be most of the time)
Rc._initSync = function(dirname, manager, configDir) {
if (!dirname) {
return {};
}
// dirname / opts.packageRoot
var rcpath = path.resolve(dirname, '.greenlockrc');
var rc;
try {
rc = JSON.parse(fs.readFileSync(rcpath));
} catch (e) {
if ('ENOENT' !== e.code) {
throw e;
}
rc = {};
}
var changed = false;
// In the general case the manager should be specified in the
// config file, which is in the config dir, but for the specific
// case in which all custom plugins are being used and no config
// dir is needed, we allow the manager to be read from the rc.
// ex: manager: { module: 'name', xxxx: 'xxxx' }
if (manager) {
if (rc.manager) {
if (
('string' === typeof rc.manager && rc.manager !== manager) ||
('string' !== typeof rc.manager &&
rc.manager.module !== manager.module)
) {
changed = true;
console.info(
"changing `manager` from '%s' to '%s'",
rc.manager.module || rc.manager,
manager.module || manager
);
}
}
rc.manager = manager;
}
if (!configDir) {
configDir = rc.configDir;
}
if (configDir && configDir !== rc.configDir) {
if (rc.configDir) {
console.info(
"changing `configDir` from '%s' to '%s'",
rc.configDir,
configDir
);
}
changed = true;
rc.configDir = configDir;
} else if (!rc.configDir) {
changed = true;
configDir = './greenlock.d';
rc.configDir = configDir;
}
if (changed) {
fs.writeFileSync(rcpath, JSON.stringify(rc));
}
return rc;
};
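
A sketch of what `Rc._initSync()` ends up writing on a first run with no explicit manager or configDir (paths are illustrative):

```js
'use strict';

var Rc = require('./lib/rc.js');

var rc = Rc._initSync(__dirname);
// a fresh run writes ./.greenlockrc containing roughly:
//   {"configDir":"./greenlock.d"}
console.info(rc.configDir); // './greenlock.d'
```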

BIN
logo/beaker-browser-301x112.png Executable file

Binary file not shown. (3.4 KiB)

Binary file not shown. (34 KiB)

BIN
logo/greenlock-1063x250.png Executable file

Binary file not shown. (18 KiB)

BIN
logo/greenlock-850x200.png Normal file

Binary file not shown. (6.3 KiB)

BIN
logo/ibm-301x112.png Executable file

Binary file not shown. (2.0 KiB)

BIN
logo/telebit-301x112.png Executable file

Binary file not shown. (2.0 KiB)

95
order.js Normal file
View File

@ -0,0 +1,95 @@
var accountKeypair = await Keypairs.generate({ kty: accKty });
if (config.debug) {
console.info('Account Key Created');
console.info(JSON.stringify(accountKeypair, null, 2));
console.info();
console.info();
}
var account = await acme.accounts.create({
agreeToTerms: agree,
// TODO detect jwk/pem/der?
accountKeypair: { privateKeyJwk: accountKeypair.private },
subscriberEmail: config.email
});
// TODO top-level agree
function agree(tos) {
if (config.debug) {
console.info('Agreeing to Terms of Service:');
console.info(tos);
console.info();
console.info();
}
agreed = true;
return Promise.resolve(tos);
}
if (config.debug) {
console.info('New Subscriber Account');
console.info(JSON.stringify(account, null, 2));
console.info();
console.info();
}
if (!agreed) {
throw new Error('Failed to ask the user to agree to terms');
}
var certKeypair = await Keypairs.generate({ kty: srvKty });
var pem = await Keypairs.export({
jwk: certKeypair.private,
encoding: 'pem'
});
if (config.debug) {
console.info('Server Key Created');
console.info('privkey.jwk.json');
console.info(JSON.stringify(certKeypair, null, 2));
// This should be saved as `privkey.pem`
console.info();
console.info('privkey.' + srvKty.toLowerCase() + '.pem:');
console.info(pem);
console.info();
}
// 'subject' should be first in list
var domains = randomDomains(rnd);
if (config.debug) {
console.info('Get certificates for random domains:');
console.info(
domains
.map(function(puny) {
var uni = punycode.toUnicode(puny);
if (puny !== uni) {
return puny + ' (' + uni + ')';
}
return puny;
})
.join('\n')
);
console.info();
}
// Create CSR
var csrDer = await CSR.csr({
jwk: certKeypair.private,
domains: domains,
encoding: 'der'
});
var csr = Enc.bufToUrlBase64(csrDer);
var csrPem = PEM.packBlock({
type: 'CERTIFICATE REQUEST',
bytes: csrDer /* { jwk: jwk, domains: opts.domains } */
});
if (config.debug) {
console.info('Certificate Signing Request');
console.info(csrPem);
console.info();
}
var results = await acme.certificates.create({
account: account,
accountKeypair: { privateKeyJwk: accountKeypair.private },
csr: csr,
domains: domains,
challenges: challenges, // must be implemented
customerEmail: null
});
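
order.js above is a fragment: it assumes, among other things, a `config` object and a `challenges` map. A minimal sketch of those two pieces (the standalone http-01 plugin is the one listed in package.json; the email is a placeholder):

```js
'use strict';

var config = {
    debug: true,
    email: 'admin@example.com'
};

var challenges = {
    'http-01': require('acme-http-01-standalone').create({})
};
```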

140
package-lock.json generated Normal file
View File

@ -0,0 +1,140 @@
{
"name": "@root/greenlock",
"version": "4.0.5",
"lockfileVersion": 1,
"requires": true,
"dependencies": {
"@greenlock/manager": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/@greenlock/manager/-/manager-3.1.0.tgz",
"integrity": "sha512-PBy5CMK+j4oD7sj7hF5qE+xKEOSiiuL2hHd5X5ttEbtnTSDKjNeqbrR5k2ZddwVNdjOVeBIeuqlm81IFZ+Ftew==",
"requires": {
"greenlock-manager-fs": "^3.1.0"
},
"dependencies": {
"greenlock-manager-fs": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/greenlock-manager-fs/-/greenlock-manager-fs-3.1.1.tgz",
"integrity": "sha512-np6qdnPIOZx40PAcSQcqK1eMPWjTKxsxcgRd/OVg0ai49WC1Ds74CTrwmB84pq2n53ikbnDBQFmKEQ4AC0DK8w==",
"requires": {
"@root/mkdirp": "^1.0.0",
"safe-replace": "^1.1.0"
}
}
}
},
"@root/acme": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/@root/acme/-/acme-3.1.0.tgz",
"integrity": "sha512-GAyaW63cpSYd2KvVp5lHLbCWeEhJPKZK9nsJvZJOKsD9Uv88KEttn4FpDZEJ+2q3Jsey0DWpuQ2I4ft0JV9p2w==",
"requires": {
"@root/csr": "^0.8.1",
"@root/encoding": "^1.0.1",
"@root/keypairs": "^0.10.0",
"@root/pem": "^1.0.4",
"@root/request": "^1.6.1",
"@root/x509": "^0.7.2"
},
"dependencies": {
"@root/request": {
"version": "1.6.1",
"resolved": "https://registry.npmjs.org/@root/request/-/request-1.6.1.tgz",
"integrity": "sha512-8wrWyeBLRp7T8J36GkT3RODJ6zYmL0/maWlAUD5LOXT28D3TDquUepyYDKYANNA3Gc8R5ZCgf+AXvSTYpJEWwQ=="
}
}
},
"@root/asn1": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/@root/asn1/-/asn1-1.0.0.tgz",
"integrity": "sha512-0lfZNuOULKJDJmdIkP8V9RnbV3XaK6PAHD3swnFy4tZwtlMDzLKoM/dfNad7ut8Hu3r91wy9uK0WA/9zym5mig==",
"requires": {
"@root/encoding": "^1.0.1"
}
},
"@root/csr": {
"version": "0.8.1",
"resolved": "https://registry.npmjs.org/@root/csr/-/csr-0.8.1.tgz",
"integrity": "sha512-hKl0VuE549TK6SnS2Yn9nRvKbFZXn/oAg+dZJU/tlKl/f/0yRXeuUzf8akg3JjtJq+9E592zDqeXZ7yyrg8fSQ==",
"requires": {
"@root/asn1": "^1.0.0",
"@root/pem": "^1.0.4",
"@root/x509": "^0.7.2"
}
},
"@root/encoding": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/@root/encoding/-/encoding-1.0.1.tgz",
"integrity": "sha512-OaEub02ufoU038gy6bsNHQOjIn8nUjGiLcaRmJ40IUykneJkIW5fxDqKxQx48cszuNflYldsJLPPXCrGfHs8yQ=="
},
"@root/keypairs": {
"version": "0.10.0",
"resolved": "https://registry.npmjs.org/@root/keypairs/-/keypairs-0.10.0.tgz",
"integrity": "sha512-t8VocY46Mtb0NTsxzyLLf5tsgfw0BXLYVADAyiRdEdqHcvPFGJdjkXNtHVQuSV/FMaC65iTOHVP4E6X8iT3Ikg==",
"requires": {
"@root/encoding": "^1.0.1",
"@root/pem": "^1.0.4",
"@root/x509": "^0.7.2"
}
},
"@root/mkdirp": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/@root/mkdirp/-/mkdirp-1.0.0.tgz",
"integrity": "sha512-hxGAYUx5029VggfG+U9naAhQkoMSXtOeXtbql97m3Hi6/sQSRL/4khKZPyOF6w11glyCOU38WCNLu9nUcSjOfA=="
},
"@root/pem": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/@root/pem/-/pem-1.0.4.tgz",
"integrity": "sha512-rEUDiUsHtild8GfIjFE9wXtcVxeS+ehCJQBwbQQ3IVfORKHK93CFnRtkr69R75lZFjcmKYVc+AXDB+AeRFOULA=="
},
"@root/request": {
"version": "1.6.1",
"resolved": "https://registry.npmjs.org/@root/request/-/request-1.6.1.tgz",
"integrity": "sha512-8wrWyeBLRp7T8J36GkT3RODJ6zYmL0/maWlAUD5LOXT28D3TDquUepyYDKYANNA3Gc8R5ZCgf+AXvSTYpJEWwQ=="
},
"@root/x509": {
"version": "0.7.2",
"resolved": "https://registry.npmjs.org/@root/x509/-/x509-0.7.2.tgz",
"integrity": "sha512-ENq3LGYORK5NiMFHEVeNMt+fTXaC7DTS6sQXoqV+dFdfT0vmiL5cDLjaXQhaklJQq0NiwicZegzJRl1ZOTp3WQ==",
"requires": {
"@root/asn1": "^1.0.0",
"@root/encoding": "^1.0.1"
}
},
"acme-http-01-standalone": {
"version": "3.0.5",
"resolved": "https://registry.npmjs.org/acme-http-01-standalone/-/acme-http-01-standalone-3.0.5.tgz",
"integrity": "sha512-W4GfK+39GZ+u0mvxRVUcVFCG6gposfzEnSBF20T/NUwWAKG59wQT1dUbS1NixRIAsRuhpGc4Jx659cErFQH0Pg=="
},
"cert-info": {
"version": "1.5.1",
"resolved": "https://registry.npmjs.org/cert-info/-/cert-info-1.5.1.tgz",
"integrity": "sha512-eoQC/yAgW3gKTKxjzyClvi+UzuY97YCjcl+lSqbsGIy7HeGaWxCPOQFivhUYm27hgsBMhsJJFya3kGvK6PMIcQ=="
},
"dotenv": {
"version": "8.2.0",
"resolved": "https://registry.npmjs.org/dotenv/-/dotenv-8.2.0.tgz",
"integrity": "sha512-8sJ78ElpbDJBHNeBzUbUVLsqKdccaa/BXF1uPTw3GrvQTBgrQrtObr2mUrE38vzYd8cEv+m/JBfDLioYcfXoaw==",
"dev": true
},
"greenlock-store-fs": {
"version": "3.2.2",
"resolved": "https://registry.npmjs.org/greenlock-store-fs/-/greenlock-store-fs-3.2.2.tgz",
"integrity": "sha512-92ejLB4DyV4qv/2b6VLGF2nKfYQeIfg3o+e/1cIoYLjlIaUFdbBXkzLTRozFlHsQPZt2ALi5qYrpC9IwH7GK8A==",
"requires": {
"@root/mkdirp": "^1.0.0",
"safe-replace": "^1.1.0"
}
},
"punycode": {
"version": "1.4.1",
"resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz",
"integrity": "sha1-wNWmOycYgArY4esPpSachN1BhF4=",
"dev": true
},
"safe-replace": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/safe-replace/-/safe-replace-1.1.0.tgz",
"integrity": "sha512-9/V2E0CDsKs9DWOOwJH7jYpSl9S3N05uyevNjvsnDauBqRowBPOyot1fIvV5N2IuZAbYyvrTXrYFVG0RZInfFw=="
}
}
}

package.json
View File

@ -1,41 +1,56 @@
 {
-    "name": "letsencrypt",
-    "version": "1.0.2",
-    "description": "Let's Encrypt for node.js on npm",
-    "main": "index.js",
+    "name": "@root/greenlock",
+    "version": "4.0.5",
+    "description": "The easiest Let's Encrypt client for Node.js and Browsers",
+    "homepage": "https://rootprojects.org/greenlock/",
+    "main": "greenlock.js",
+    "browser": {},
+    "bin": {
+        "greenlock": "bin/greenlock.js"
+    },
+    "files": [
+        "*.js",
+        "lib",
+        "bin",
+        "dist"
+    ],
     "scripts": {
-        "test": "echo \"Error: no test specified\" && exit 1"
+        "build": "nodex bin/bundle.js",
+        "lint": "jshint lib bin",
+        "test": "node server.js",
+        "start": "node server.js"
     },
     "repository": {
         "type": "git",
-        "url": "git+https://github.com/Daplie/node-letsencrypt.git"
+        "url": "https://git.rootprojects.org/root/greenlock.js.git"
     },
     "keywords": [
-        "letsencrypt",
-        "letsencrypt.org",
-        "le",
         "Let's Encrypt",
-        "lejs",
-        "le.js",
-        "acme",
-        "node",
-        "nodejs",
-        "node.js",
-        "client"
+        "ACME",
+        "browser",
+        "EC",
+        "RSA",
+        "CSR",
+        "greenlock",
+        "VanillaJS",
+        "ZeroSSL"
     ],
     "author": "AJ ONeal <coolaj86@gmail.com> (https://coolaj86.com/)",
-    "license": "(MIT OR Apache-2.0)",
-    "bugs": {
-        "url": "https://github.com/Daplie/node-letsencrypt/issues"
-    },
-    "homepage": "https://github.com/Daplie/node-letsencrypt#readme",
-    "devDependencies": {
-        "express": "^4.13.3",
-        "localhost.daplie.com-certificates": "^1.1.2"
-    },
+    "license": "MPL-2.0",
     "dependencies": {
-        "bluebird": "^3.0.6",
-        "letsencrypt-python": "^1.0.3",
-        "serve-static": "^1.10.0"
+        "@greenlock/manager": "^3.1.0",
+        "@root/acme": "^3.1.0",
+        "@root/csr": "^0.8.1",
+        "@root/keypairs": "^0.10.0",
+        "@root/mkdirp": "^1.0.0",
+        "@root/request": "^1.6.1",
+        "acme-http-01-standalone": "^3.0.5",
+        "cert-info": "^1.5.1",
+        "greenlock-store-fs": "^3.2.2",
+        "safe-replace": "^1.1.0"
+    },
+    "devDependencies": {
+        "dotenv": "^8.2.0",
+        "punycode": "^1.4.1"
     }
 }

341
plugins.js Normal file
View File

@ -0,0 +1,341 @@
'use strict';
var P = module.exports;
var spawn = require('child_process').spawn;
var spawnSync = require('child_process').spawnSync;
var promisify = require('util').promisify;
// Exported for CLIs and such to override
P.PKG_DIR = __dirname;
P._loadStore = function(storeConf) {
return P._loadHelper(storeConf.module).then(function(plugin) {
return P._normalizeStore(storeConf.module, plugin.create(storeConf));
});
};
P._loadChallenge = function(chConfs, typ01) {
return P._loadHelper(chConfs[typ01].module).then(function(plugin) {
var ch = P._normalizeChallenge(
chConfs[typ01].module,
plugin.create(chConfs[typ01])
);
ch._type = typ01;
return ch;
});
};
P._loadHelper = function(modname) {
try {
return Promise.resolve(require(modname));
} catch (e) {
console.error("Could not load '%s'", modname);
console.error('Did you install it?');
console.error('\tnpm install --save %s', modname);
e.context = 'load_plugin';
throw e;
// Fun experiment, bad idea
/*
return P._install(modname).then(function() {
return require(modname);
});
*/
}
};
P._normalizeStore = function(name, store) {
var acc = store.accounts;
var crt = store.certificates;
var warned = false;
function warn() {
if (warned) {
return;
}
warned = true;
console.warn(
"'" +
name +
"' may have incorrect function signatures, or contains deprecated use of callbacks"
);
}
// accs
if (acc.check && 2 === acc.check.length) {
warn();
acc._thunk_check = acc.check;
acc.check = promisify(acc._thunk_check);
}
if (acc.set && 3 === acc.set.length) {
warn();
acc._thunk_set = acc.set;
acc.set = promisify(acc._thunk_set);
}
if (2 === acc.checkKeypair.length) {
warn();
acc._thunk_checkKeypair = acc.checkKeypair;
acc.checkKeypair = promisify(acc._thunk_checkKeypair);
}
if (3 === acc.setKeypair.length) {
warn();
acc._thunk_setKeypair = acc.setKeypair;
acc.setKeypair = promisify(acc._thunk_setKeypair);
}
// certs
if (2 === crt.check.length) {
warn();
crt._thunk_check = crt.check;
crt.check = promisify(crt._thunk_check);
}
if (3 === crt.set.length) {
warn();
crt._thunk_set = crt.set;
crt.set = promisify(crt._thunk_set);
}
if (2 === crt.checkKeypair.length) {
warn();
crt._thunk_checkKeypair = crt.checkKeypair;
crt.checkKeypair = promisify(crt._thunk_checkKeypair);
}
if (2 === crt.setKeypair.length) {
warn();
crt._thunk_setKeypair = crt.setKeypair;
crt.setKeypair = promisify(crt._thunk_setKeypair);
}
return store;
};
P._normalizeChallenge = function(name, ch) {
var gch = {};
var warned = false;
function warn() {
if (warned) {
return;
}
warned = true;
console.warn(
"'" +
name +
"' may have incorrect function signatures, or contains deprecated use of callbacks"
);
}
var warned2 = false;
function warn2() {
if (warned2) {
return;
}
warned2 = true;
console.warn(
"'" +
name +
"' did not return a Promise when called. This should be fixed by the maintainer."
);
}
function wrappy(fn) {
return function(_params) {
return Promise.resolve().then(function() {
var result = fn.call(ch, _params);
if (!result || !result.then) {
warn2();
}
return result;
});
};
}
// init, zones, set, get, remove, propagationDelay
if (ch.init) {
if (2 === ch.init.length) {
warn();
ch._thunk_init = ch.init;
ch.init = promisify(ch._thunk_init);
}
gch.init = wrappy(ch.init);
}
if (ch.zones) {
if (2 === ch.zones.length) {
warn();
ch._thunk_zones = ch.zones;
ch.zones = promisify(ch._thunk_zones);
}
gch.zones = wrappy(ch.zones);
}
if (2 === ch.set.length) {
warn();
ch._thunk_set = ch.set;
ch.set = promisify(ch._thunk_set);
}
gch.set = wrappy(ch.set);
if (2 === ch.remove.length) {
warn();
ch._thunk_remove = ch.remove;
ch.remove = promisify(ch._thunk_remove);
}
gch.remove = wrappy(ch.remove);
if (ch.get) {
if (2 === ch.get.length) {
warn();
ch._thunk_get = ch.get;
ch.get = promisify(ch._thunk_get);
}
gch.get = wrappy(ch.get);
}
if("number" === typeof ch.propagationDelay) {
gch.propagationDelay = ch.propagationDelay;
}
return gch;
};
P._loadSync = function(modname) {
try {
return require(modname);
} catch (e) {
console.error("Could not load '%s'", modname);
console.error('Did you install it?');
console.error('\tnpm install --save %s', modname);
e.context = 'load_plugin';
throw e;
}
/*
try {
mod = require(modname);
} catch (e) {
P._installSync(modname);
mod = require(modname);
}
*/
};
P._installSync = function(moduleName) {
try {
return require(moduleName);
} catch (e) {
// continue
}
var npm = 'npm';
var args = ['install', '--save', moduleName];
var out = '';
var cmd;
try {
cmd = spawnSync(npm, args, {
cwd: P.PKG_DIR,
windowsHide: true
});
} catch (e) {
console.error(
"Failed to start: '" +
npm +
' ' +
args.join(' ') +
"' in '" +
P.PKG_DIR +
"'"
);
console.error(e.message);
process.exit(1);
}
if (!cmd.status) {
return;
}
out += cmd.stdout.toString('utf8');
out += cmd.stderr.toString('utf8');
if (out) {
console.error(out);
console.error();
console.error();
}
console.error(
"Failed to run: '" +
npm +
' ' +
args.join(' ') +
"' in '" +
P.PKG_DIR +
"'"
);
console.error(
'Try for yourself:\n\tcd ' + P.PKG_DIR + '\n\tnpm ' + args.join(' ')
);
process.exit(1);
};
P._install = function(moduleName) {
return new Promise(function(resolve) {
if (!moduleName) {
throw new Error('no module name given');
}
var npm = 'npm';
var args = ['install', '--save', moduleName];
var out = '';
var cmd = spawn(npm, args, {
cwd: P.PKG_DIR,
windowsHide: true
});
cmd.stdout.on('data', function(chunk) {
out += chunk.toString('utf8');
});
cmd.stdout.on('data', function(chunk) {
out += chunk.toString('utf8');
});
cmd.on('error', function(e) {
console.error(
"Failed to start: '" +
npm +
' ' +
args.join(' ') +
"' in '" +
P.PKG_DIR +
"'"
);
console.error(e.message);
process.exit(1);
});
cmd.on('exit', function(code) {
if (!code) {
resolve();
return;
}
if (out) {
console.error(out);
console.error();
console.error();
}
console.error(
"Failed to run: '" +
npm +
' ' +
args.join(' ') +
"' in '" +
P.PKG_DIR +
"'"
);
console.error(
'Try for yourself:\n\tcd ' +
P.PKG_DIR +
'\n\tnpm ' +
args.join(' ')
);
process.exit(1);
});
});
};
if (require.main === module) {
P._installSync(process.argv[2]);
}
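
A minimal in-memory http-01 plugin sketch that satisfies the contract `P._normalizeChallenge()` expects above (promise-returning `set`/`get`/`remove`; the data shapes mirror those used in lib/challenges-wrapper.js):

```js
'use strict';

module.exports.create = function(config) {
    var db = {};

    return {
        init: function(deps) {
            return Promise.resolve(null);
        },
        set: function(data) {
            var ch = data.challenge;
            db[ch.token] = ch.keyAuthorization;
            return Promise.resolve(null);
        },
        get: function(data) {
            var ch = data.challenge;
            if (!db[ch.token]) {
                return Promise.resolve(null);
            }
            return Promise.resolve({ keyAuthorization: db[ch.token] });
        },
        remove: function(data) {
            delete db[data.challenge.token];
            return Promise.resolve(null);
        }
    };
};
```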

View File

@ -1,9 +0,0 @@
moved the tests to the examples folder
```bash
node examples/commandline.js example.com,www.example.com user@example.com agree
```
Try it for yourself.
Go watch [Let's Encrypt in (exactly) 90 seconds](https://daplie.com/articles/lets-encrypt-in-literally-90-seconds/) and swap out the Caddy instructions with the node instructions.

View File

@ -1 +0,0 @@
hello

31
tests/cli.sh Normal file
View File

@ -0,0 +1,31 @@
#!/bin/bash
set -e
# TODO notify if wildcard is selected and no dns challenge is present
node bin/greenlock.js add --subject example.com --altnames 'example.com,*.example.com'
node bin/greenlock.js update --subject example.com
node bin/greenlock.js config --subject example.com
node bin/greenlock.js config --subject '*.example.com'
node bin/greenlock.js defaults
node bin/greenlock.js defaults --account-key-type
node bin/greenlock.js defaults
# using --challenge-xx-xx-xxx is additive
node bin/greenlock.js defaults --challenge-dns-01 foo-http-01-bar --challenge-dns-01-token BIG_TOKEN
# using --challenge is exclusive (will delete things not mentioned)
node bin/greenlock.js defaults --challenge acme-http-01-standalone
# should delete all and add just this one anew
node bin/greenlock.js update --subject example.com --challenge bar-http-01-baz
# should add, leaving the existing
node bin/greenlock.js update --subject example.com --challenge-dns-01 baz-dns-01-qux --challenge-dns-01-token BIG_TOKEN
# should delete all and add just this one anew
node bin/greenlock.js update --subject example.com --challenge bar-http-01-baz
node bin/greenlock.js remove --subject example.com
# TODO test for failure
# node bin/greenlock.js add --subject example.com
# node bin/greenlock.js add --subject example --altnames example
# node bin/greenlock.js add --subject example.com --altnames '*.example.com'
# node bin/greenlock.js add --subject example.com --altnames '*.example.com,example.com'
# node bin/greenlock.js update --altnames example.com
# node bin/greenlock.js config foo.example.com

View File

@ -1,14 +0,0 @@
'use strict';
var path = require('path');
module.exports = {
server: "https://acme-staging.api.letsencrypt.org/directory"
, tlsSni01Port: 5001
, http01Port: 80
, webrootPath: path.join(__dirname, "acme-challenge")
, configDir: path.join(__dirname, "letsencrypt.config")
, workDir: path.join(__dirname, "letsencrypt.work")
, logsDir: path.join(__dirname, "letsencrypt.logs")
, allowedDomains: ['example.com']
};

54
tests/index.js Normal file
View File

@ -0,0 +1,54 @@
'use strict';
require('dotenv').config();
var Greenlock = require('../');
var subject = process.env.BASE_DOMAIN;
var altnames = [subject, '*.' + subject, 'foo.bar.' + subject];
var email = process.env.SUBSCRIBER_EMAIL;
var challenge = JSON.parse(process.env.CHALLENGE_OPTIONS);
challenge.module = process.env.CHALLENGE_PLUGIN;
var greenlock = Greenlock.create({
packageAgent: 'Greenlock_Test/v0',
maintainerEmail: email,
staging: true,
manager: require('greenlock-manager-fs').create({
//configFile: '~/.config/greenlock/certs.json',
})
});
greenlock.manager
.defaults({
agreeToTerms: true,
subscriberEmail: email,
challenges: {
'dns-01': challenge
}
//store: args.storeOpts,
//renewOffset: args.renewOffset || '30d',
//renewStagger: '1d'
})
.then(function() {
return greenlock
.add({
subject: subject,
altnames: altnames,
subscriberEmail: email
})
.then(function() {
return greenlock
.get({ servername: subject })
.then(function(pems) {
if (pems && pems.privkey && pems.cert && pems.chain) {
console.info('Success');
}
//console.log(pems);
});
});
})
.catch(function(e) {
console.error('Big bad error:', e.code);
console.error(e);
});

7
user-events.js Normal file
View File

@ -0,0 +1,7 @@
'use strict';
var UserEvents = module.exports;
UserEvents.notify = function() {
// TODO not implemented yet
};

283
utils.js
View File

@ -1,20 +1,281 @@
'use strict';
var U = module.exports;
var promisify = require('util').promisify;
//var resolveSoa = promisify(require('dns').resolveSoa);
var resolveMx = promisify(require('dns').resolveMx);
var punycode = require('punycode');
var Keypairs = require('@root/keypairs');
// TODO move to @root
var certParser = require('cert-info');
U._parseDuration = function(str) {
if ('number' === typeof str) {
return str;
}
var pattern = /^(\-?\d+(\.\d+)?)([wdhms]|ms)$/;
var matches = str.match(pattern);
if (!matches || !matches[0]) {
throw new Error('invalid duration string: ' + str);
}
var n = parseInt(matches[1], 10);
var unit = matches[3];
switch (unit) {
case 'w':
n *= 7;
/*falls through*/
case 'd':
n *= 24;
/*falls through*/
case 'h':
n *= 60;
/*falls through*/
case 'm':
n *= 60;
/*falls through*/
case 's':
n *= 1000;
/*falls through*/
case 'ms':
n *= 1; // for completeness
}
return n;
};
U._encodeName = function(str) {
return punycode.toASCII(str.toLowerCase());
};
U._validName = function(str) {
// A quick check of the 38 and two ½ valid characters
// 253 char max full domain, including dots
// 63 char max each label segment
// Note: '*' is not a valid hostname character, but a leading '*.' wildcard label is allowed here
// Note: _ (underscore) is only allowed for "domain names", not "hostnames"
// Note: - (hyphen) is not allowed as a first character (but a number is)
return (
/^(\*\.)?[a-z0-9_\.\-]+\.[a-z0-9_\.\-]+$/.test(str) &&
str.length < 254 &&
str.split('.').every(function(label) {
return label.length > 0 && label.length < 64;
})
);
};
U._validMx = function(email) {
var host = email.split('@').slice(1)[0];
// try twice, just because DNS hiccups sometimes
// Note: we don't care if the domain exists, just that it *can* exist
return resolveMx(host).catch(function() {
return U._timeout(1000).then(function() {
return resolveMx(host);
});
});
};
// should be called after _validName
U._validDomain = function(str) {
// TODO use @root/dns (currently dns-suite)
// because node's dns can't read Authority records
return Promise.resolve(str);
/*
// try twice, just because DNS hiccups sometimes
// Note: we don't care if the domain exists, just that it *can* exist
return resolveSoa(str).catch(function() {
return U._timeout(1000).then(function() {
return resolveSoa(str);
});
});
*/
};
// foo.example.com and *.example.com overlap
// should be called after _validName
// (which enforces *. or no *)
U._uniqueNames = function(altnames) {
var dups = {};
var wilds = {};
if (
altnames.some(function(w) {
if ('*.' !== w.slice(0, 2)) {
return;
}
if (wilds[w]) {
return true;
}
wilds[w] = true;
})
) {
return false;
}
return altnames.every(function(name) {
var w;
if ('*.' !== name.slice(0, 2)) {
w =
'*.' +
name
.split('.')
.slice(1)
.join('.');
} else {
return true;
}
if (!dups[name] && !dups[w]) {
dups[name] = true;
return true;
}
});
};
U._timeout = function(d) {
return new Promise(function(resolve) {
setTimeout(resolve, d);
});
};
U._genKeypair = function(keyType) {
var keyopts;
var len = parseInt(keyType.replace(/.*?(\d)/, '$1') || 0, 10);
if (/RSA/.test(keyType)) {
keyopts = {
kty: 'RSA',
modulusLength: len || 2048
};
} else if (/^(EC|P\-?\d)/i.test(keyType)) {
keyopts = {
kty: 'EC',
namedCurve: 'P-' + (len || 256)
};
} else {
// TODO put in ./errors.js
throw new Error('invalid key type: ' + keyType);
}
return Keypairs.generate(keyopts).then(function(pair) {
return U._jwkToSet(pair.private);
});
};
// TODO use ACME._importKeypair ??
U._importKeypair = function(keypair) {
// this should import all formats equally well:
// 'object' (JWK), 'string' (private key pem), kp.privateKeyPem, kp.privateKeyJwk
if (keypair.private || keypair.d) {
return U._jwkToSet(keypair.private || keypair);
}
if (keypair.privateKeyJwk) {
return U._jwkToSet(keypair.privateKeyJwk);
}
if ('string' !== typeof keypair && !keypair.privateKeyPem) {
// TODO put in errors
throw new Error('missing private key');
}
return Keypairs.import({ pem: keypair.privateKeyPem || keypair }).then(
function(priv) {
if (!priv.d) {
throw new Error('missing private key');
}
return U._jwkToSet(priv);
}
);
};
U._jwkToSet = function(jwk) {
var keypair = {
privateKeyJwk: jwk
};
return Promise.all([
Keypairs.export({
jwk: jwk,
encoding: 'pem'
}).then(function(pem) {
keypair.privateKeyPem = pem;
}),
Keypairs.export({
jwk: jwk,
encoding: 'pem',
public: true
}).then(function(pem) {
keypair.publicKeyPem = pem;
}),
Keypairs.publish({
jwk: jwk
}).then(function(pub) {
keypair.publicKeyJwk = pub;
})
]).then(function() {
return keypair;
});
};
U._attachCertInfo = function(results) {
var certInfo = certParser.info(results.cert);
// subject, altnames, issuedAt, expiresAt
Object.keys(certInfo).forEach(function(key) {
results[key] = certInfo[key];
});
return results;
};
U._certHasDomain = function(certInfo, _domain) {
var names = (certInfo.altnames || []).slice(0);
return names.some(function(name) {
var domain = _domain.toLowerCase();
name = name.toLowerCase();
if ('*.' === name.substr(0, 2)) {
name = name.substr(2);
domain = domain
.split('.')
.slice(1)
.join('.');
}
return name === domain;
});
};
// a bit heavy to be labeled 'utils'... perhaps 'common' would be better?
U._getOrCreateKeypair = function(db, subject, query, keyType, mustExist) {
var exists = false;
return db
.checkKeypair(query)
.then(function(kp) {
if (kp) {
exists = true;
return U._importKeypair(kp);
}
if (mustExist) {
// TODO put in errors
throw new Error(
'required keypair not found: ' +
(subject || '') +
' ' +
JSON.stringify(query)
);
}
return U._genKeypair(keyType);
})
.then(function(keypair) {
return { exists: exists, keypair: keypair };
});
};
U._getKeypair = function(db, subject, query) {
return U._getOrCreateKeypair(db, subject, query, '', true).then(function(
result
) {
return result.keypair;
});
};
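
A few worked examples of the helpers above (values are illustrative):

```js
'use strict';

var U = require('./utils.js');

// durations fall through the switch, multiplying down to milliseconds
U._parseDuration('45d'); // 45 * 24 * 60 * 60 * 1000 = 3888000000
U._parseDuration('90s'); // 90000
U._parseDuration(5000);  // numbers pass straight through

// wildcard matching is one label deep
U._certHasDomain({ altnames: ['example.com', '*.example.com'] }, 'www.example.com'); // true
U._certHasDomain({ altnames: ['example.com'] }, 'www.example.com'); // false
```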