- This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
- This is free software, and you are welcome to redistribute it
- under certain conditions; type `show c' for details.
-
-The hypothetical commands `show w` and `show c` should show the appropriate
-parts of the General Public License. Of course, your program's commands might
-be different; for a GUI interface, you would use an *about box*.
-
-You should also get your employer (if you work as a programmer) or school, if
-any, to sign a *copyright disclaimer* for the program, if necessary. For more
-information on this, and how to apply and follow the GNU GPL, see
-[http://www.gnu.org/licenses/](http://www.gnu.org/licenses/).
-
-The GNU General Public License does not permit incorporating your program into
-proprietary programs. If your program is a subroutine library, you may consider
-it more useful to permit linking proprietary applications with the library. If
-this is what you want to do, use the GNU Lesser General Public License instead
-of this License. But first, please read
-[http://www.gnu.org/philosophy/why-not-lgpl.html](http://www.gnu.org/philosophy/why-not-lgpl.html).
diff --git a/lc-esp-engine-svc/build.gradle b/lc-esp-engine-svc/build.gradle
deleted file mode 100644
index 0e04353704746b9a20eb2e2ca8ba5f049b7baf0f..0000000000000000000000000000000000000000
--- a/lc-esp-engine-svc/build.gradle
+++ /dev/null
@@ -1,70 +0,0 @@
-plugins {
- id 'java'
- id 'application'
- id "com.palantir.docker" version "0.33.0"
-}
-
-group 'leighco'
-version '18.11'
-
-sourceCompatibility = JavaVersion.VERSION_17
-targetCompatibility = JavaVersion.VERSION_17
-
-repositories {
- mavenCentral()
-}
-
-dependencies {
- implementation project(':lc-esp-sdk')
- // https://mvnrepository.com/artifact/org.apache.activemq/activemq-broker
- implementation group: 'org.apache.activemq', name: 'activemq-broker', version: '5.17.1'
- // https://mvnrepository.com/artifact/org.apache.activemq/activemq-kahadb-store
- implementation group: 'org.apache.activemq', name: 'activemq-kahadb-store', version: '5.17.1'
- // https://mvnrepository.com/artifact/org.apache.activemq/activemq-mqtt
- implementation 'org.apache.activemq:activemq-mqtt:5.17.1'
- // https://mvnrepository.com/artifact/org.apache.activemq/activemq-stomp
- implementation group: 'org.apache.activemq', name: 'activemq-stomp', version: '5.17.1'
- // https://mvnrepository.com/artifact/org.apache.activemq/activemq-http
- implementation group: 'org.apache.activemq', name: 'activemq-http', version: '5.17.1'
- // https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-server
- implementation 'org.eclipse.jetty:jetty-server:9.4.46.v20220331'
- // https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-xml
- implementation 'org.eclipse.jetty:jetty-xml:9.4.46.v20220331'
- // https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-webapp
- implementation 'org.eclipse.jetty:jetty-webapp:9.4.46.v20220331'
- // https://mvnrepository.com/artifact/org.eclipse.jetty.websocket/websocket-server
- implementation 'org.eclipse.jetty.websocket:websocket-server:9.4.46.v20220331'
-
- // https://mvnrepository.com/artifact/org.apache.httpcomponents/httpclient
- implementation 'org.apache.httpcomponents:httpclient:4.5.13'
- implementation 'javax.servlet:javax.servlet-api:4.0.1'
- testImplementation 'org.junit.jupiter:junit-jupiter-api:5.8.1'
- testRuntimeOnly 'org.junit.jupiter:junit-jupiter-engine:5.8.1'
-}
-
-test {
- useJUnitPlatform()
-}
-
-application {
- mainClass = 'lc.esp.engine.service.EngineService'
-}
-
-docker {
- name "docker.leigh-co.com/${project.name}:${project.version}"
- files tasks.distTar.outputs
-}
-
-task dockerfile() {
- doLast {
- new File(projectDir, 'Dockerfile').text = """FROM docker.leigh-co.com/openjdk:17-alpine
-EXPOSE 52018
-RUN mkdir /app
-COPY ${project.name}-${project.version}.tar /dist.tar
-RUN tar xfv /dist.tar
-RUN rm /dist.tar
-ENTRYPOINT ["/${project.name}-${project.version}/bin/${project.name}"]
-"""
- }
-}
-
diff --git a/lc-esp-engine-svc/src/main/java/lc/esp/engine/service/EngineService.java b/lc-esp-engine-svc/src/main/java/lc/esp/engine/service/EngineService.java
deleted file mode 100644
index e36e273a2837fe8a3a3041a7440f808ff0f4a33b..0000000000000000000000000000000000000000
--- a/lc-esp-engine-svc/src/main/java/lc/esp/engine/service/EngineService.java
+++ /dev/null
@@ -1,66 +0,0 @@
-package lc.esp.engine.service;
-
-import lc.esp.sdk.ESPAddress;
-import lc.mecha.log.MechaLogger;
-import lc.mecha.log.MechaLoggerFactory;
-import lc.mecha.util.BasicallyDangerous;
-import lc.mecha.util.UniversalJob;
-import org.apache.activemq.broker.BrokerPlugin;
-import org.apache.activemq.broker.BrokerService;
-import org.apache.activemq.broker.region.DestinationInterceptor;
-import org.apache.activemq.broker.region.virtual.VirtualDestination;
-import org.apache.activemq.broker.region.virtual.VirtualDestinationInterceptor;
-import org.apache.activemq.broker.region.virtual.VirtualTopic;
-import org.apache.activemq.usage.SystemUsage;
-
-import static java.lang.Thread.sleep;
-
-public class EngineService extends BasicallyDangerous {
- private static final MechaLogger logger = MechaLoggerFactory.getLogger(EngineService.class);
-
- public static void main(String[] args) throws InterruptedException {
- UniversalJob.banner(logger, "Enhanced Services Platform mk18/ENGINE (GIPSY DANGER)");
-
- EngineService eng = new EngineService();
- eng.run();
-
- //noinspection InfiniteLoopStatement
- while (true) {
- //noinspection BusyWait
- sleep(1000);
- }
- }
-
- @Override
- public void runDangerously() throws Exception {
- BrokerService broker = new BrokerService();
- broker.addConnector("tcp://0.0.0.0:52018");
- broker.addConnector("mqtt://0.0.0.0:52019");
- broker.addConnector("stomp://0.0.0.0:52020");
- broker.addConnector("ws://0.0.0.0:52021");
- broker.setPersistent(true);
-
- VirtualDestinationInterceptor virtualDestinationInterceptor = new VirtualDestinationInterceptor();
- VirtualTopic vTopic = new VirtualTopic();
- vTopic.setName(">");
- vTopic.setPrefix(ESPAddress.VIRTUAL_TOPIC_PREFIX + ".*.");
- vTopic.setLocal(false);
- // Note ramifications: https://activemq.apache.org/virtual-destinations
- vTopic.setSelectorAware(true);
- virtualDestinationInterceptor.setVirtualDestinations(new VirtualDestination[]{vTopic});
- DestinationInterceptor[] destinationInterceptors = new DestinationInterceptor[]{virtualDestinationInterceptor};
- broker.setDestinationInterceptors(destinationInterceptors);
- // Plugins registered here are installed into the broker chain when the service starts.
- broker.setPlugins(new BrokerPlugin[]{new EnhancedBrokerPlugin()});
- broker.start();
-
- //noinspection InfiniteLoopStatement
- while (true) {
- SystemUsage su = broker.getConsumerSystemUsage();
- logger.info("{}", su.getMemoryUsage());
- logger.info("{}", su.getStoreUsage());
- logger.info("{}", su.getTempUsage());
- sleep(60 * 1000);
- }
- }
-}
\ No newline at end of file
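For orientation, the interceptor above turns every topic into an ActiveMQ virtual topic: producers publish to topics, and each consumer reads its own copy from a queue whose name begins with `ESPAddress.VIRTUAL_TOPIC_PREFIX` followed by a consumer segment. Below is a minimal consumer sketch against the OpenWire connector on port 52018 using the plain ActiveMQ JMS client; the broker host and the queue name `ESP.monitor.example` are illustrative placeholders, since the actual prefix constant is not shown in this diff.

```java
import javax.jms.Connection;
import javax.jms.Message;
import javax.jms.MessageConsumer;
import javax.jms.Session;
import javax.jms.TextMessage;
import org.apache.activemq.ActiveMQConnectionFactory;

public class ExampleVirtualTopicConsumer {
    public static void main(String[] args) throws Exception {
        // Connect to the OpenWire connector opened by EngineService (tcp://0.0.0.0:52018).
        ActiveMQConnectionFactory factory = new ActiveMQConnectionFactory("tcp://localhost:52018");
        Connection connection = factory.createConnection();
        connection.start();
        Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);

        // Virtual-topic consumers read from "<VIRTUAL_TOPIC_PREFIX>.<consumerName>.<topicName>";
        // "ESP.monitor.example" is a made-up name standing in for that pattern.
        MessageConsumer consumer = session.createConsumer(session.createQueue("ESP.monitor.example"));
        Message message = consumer.receive();
        if (message instanceof TextMessage) {
            System.out.println(((TextMessage) message).getText());
        }
        connection.close();
    }
}
```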
diff --git a/lc-esp-engine-svc/src/main/java/lc/esp/engine/service/EnhancedBroker.java b/lc-esp-engine-svc/src/main/java/lc/esp/engine/service/EnhancedBroker.java
deleted file mode 100644
index 53e0c4584ac1ded8105ac2d1f8a71e8f7de236ca..0000000000000000000000000000000000000000
--- a/lc-esp-engine-svc/src/main/java/lc/esp/engine/service/EnhancedBroker.java
+++ /dev/null
@@ -1,82 +0,0 @@
-package lc.esp.engine.service;
-
-import lc.mecha.json.JSONObject;
-import lc.mecha.log.MechaLogger;
-import lc.mecha.log.MechaLoggerFactory;
-import org.apache.activemq.broker.Broker;
-import org.apache.activemq.broker.BrokerFilter;
-import org.apache.activemq.broker.ProducerBrokerExchange;
-import org.apache.activemq.command.ActiveMQBytesMessage;
-import org.apache.activemq.command.ActiveMQDestination;
-import org.apache.activemq.command.Message;
-import org.apache.activemq.util.ByteSequence;
-
-import java.io.IOException;
-import java.nio.charset.StandardCharsets;
-
-/**
- * Implements the Enhanced Services Platform (ESP) broker mechanics for ActiveMQ: it keeps the
- * JMS correlation-id and reply-to fields in sync with the corresponding ESP message properties,
- * and unwraps v18 JSON envelopes into message properties plus a bare payload body.
- *
- * @author Alex Leigh
- * @since mk18 (GIPSY DANGER)
- */
-public class EnhancedBroker extends BrokerFilter {
- public static final String KEY_TYPE = "type";
- public static final String KEY_PARAMETERS = "parameters";
- public static final String KEY_PAYLOAD = "payload";
- public static final String V18_TYPE = "lc.esp.api18.envelope";
- public static final String ESP_CORRELATION_ID = "lc.esp.correlation_id";
- public static final String ESP_REPLYTO = "lc.esp.reply_to";
-
- private static final MechaLogger logger = MechaLoggerFactory.getLogger(EnhancedBroker.class);
-
- public EnhancedBroker(Broker next) {
- super(next);
- logger.info("Created new Enhanced Broker [broker: {}]", next);
- }
-
- @Override
- public void send(ProducerBrokerExchange producerExchange, Message msg) throws Exception {
- logger.trace("send(): {}", msg);
-
- if (msg.getCorrelationId() != null && msg.getProperty(ESP_CORRELATION_ID) == null) {
- msg.setProperty(ESP_CORRELATION_ID, msg.getCorrelationId());
- } else if (msg.getCorrelationId() == null) {
- Object correlationId = msg.getProperty(ESP_CORRELATION_ID);
- if (correlationId != null) msg.setCorrelationId(correlationId.toString());
- }
-
- if (msg.getReplyTo() == null) {
- Object replyTo = msg.getProperty(ESP_REPLYTO);
- // Still works for topic as long as the destination is qualified
- if (replyTo != null) msg.setReplyTo(ActiveMQDestination.createDestination(replyTo.toString(),
- ActiveMQDestination.QUEUE_TYPE));
- }
-
- // For the time being, all ESP messages are JSON, so we can let this ride
- if (msg instanceof ActiveMQBytesMessage) {
- String s = new String(msg.getContent().getData(), StandardCharsets.UTF_8);
- JSONObject envelope = new JSONObject(s);
- String type = envelope.optString(KEY_TYPE);
- logger.trace("Got type: {}", type);
- if (type != null) {
- if (type.equals(V18_TYPE)) {
- JSONObject tags = envelope.optJSONObject(KEY_PARAMETERS);
- if (tags != null) {
- for (String key : tags.keySet()) {
- try {
- msg.setProperty(key, tags.get(key));
- } catch (IOException e) {
- logger.warn("Unable to set property: {}", e);
- }
- }
- }
- JSONObject payload = envelope.getJSONObject(KEY_PAYLOAD);
- logger.trace("Replacing messaging contents: {}", payload);
- msg.setContent(new ByteSequence(payload.toString().getBytes(StandardCharsets.UTF_8)));
- }
- }
- }
-
- super.send(producerExchange, msg);
- }
-}
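To make the envelope handling above concrete, here is a producer sketch that sends a v18 envelope as a bytes message, which is the shape `send()` unwraps. It assumes the standard ActiveMQ JMS client; the broker URL, destination names, and parameter values are illustrative, while the key names and the `lc.esp.api18.envelope` type string come from the constants in this class.

```java
import javax.jms.BytesMessage;
import javax.jms.Connection;
import javax.jms.MessageProducer;
import javax.jms.Session;
import java.nio.charset.StandardCharsets;
import org.apache.activemq.ActiveMQConnectionFactory;

public class ExampleEnvelopeProducer {
    public static void main(String[] args) throws Exception {
        Connection connection = new ActiveMQConnectionFactory("tcp://localhost:52018").createConnection();
        connection.start();
        Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
        MessageProducer producer = session.createProducer(session.createTopic("ESP.example.topic"));

        // A v18 envelope: the broker filter copies each entry of "parameters" onto the JMS
        // message as a property, then replaces the message body with the bare "payload" object.
        String envelope = "{"
                + "\"type\":\"lc.esp.api18.envelope\","
                + "\"parameters\":{\"lc.esp.reply_to\":\"ESP.example.replies\"},"
                + "\"payload\":{\"hello\":\"world\"}"
                + "}";

        BytesMessage message = session.createBytesMessage();
        message.writeBytes(envelope.getBytes(StandardCharsets.UTF_8));
        producer.send(message);
        connection.close();
    }
}
```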
diff --git a/lc-esp-engine-svc/src/main/java/lc/esp/engine/service/EnhancedBrokerPlugin.java b/lc-esp-engine-svc/src/main/java/lc/esp/engine/service/EnhancedBrokerPlugin.java
deleted file mode 100644
index 713e94b8b864117a9cefbfa557d4807d5a202973..0000000000000000000000000000000000000000
--- a/lc-esp-engine-svc/src/main/java/lc/esp/engine/service/EnhancedBrokerPlugin.java
+++ /dev/null
@@ -1,15 +0,0 @@
-package lc.esp.engine.service;
-
-import lc.mecha.log.MechaLogger;
-import lc.mecha.log.MechaLoggerFactory;
-import org.apache.activemq.broker.Broker;
-import org.apache.activemq.broker.BrokerPlugin;
-
-public class EnhancedBrokerPlugin implements BrokerPlugin {
- private static final MechaLogger logger = MechaLoggerFactory.getLogger(EnhancedBrokerPlugin.class);
-
- @Override
- public Broker installPlugin(Broker broker) throws Exception {
- return new EnhancedBroker(broker);
- }
-}
diff --git a/lc-esp-engine-svc/src/main/java/lc/esp/engine/service/NoisyBrokerFilter.java b/lc-esp-engine-svc/src/main/java/lc/esp/engine/service/NoisyBrokerFilter.java
deleted file mode 100644
index 19ec8b47aae39b8dd183bcd197166fbf66ede256..0000000000000000000000000000000000000000
--- a/lc-esp-engine-svc/src/main/java/lc/esp/engine/service/NoisyBrokerFilter.java
+++ /dev/null
@@ -1,429 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package lc.esp.engine.service;
-
-import lc.mecha.log.MechaLogger;
-import lc.mecha.log.MechaLoggerFactory;
-import org.apache.activemq.broker.*;
-import org.apache.activemq.broker.region.Destination;
-import org.apache.activemq.broker.region.MessageReference;
-import org.apache.activemq.broker.region.Subscription;
-import org.apache.activemq.broker.region.virtual.VirtualDestination;
-import org.apache.activemq.command.*;
-import org.apache.activemq.store.PListStore;
-import org.apache.activemq.thread.Scheduler;
-import org.apache.activemq.usage.Usage;
-
-import java.net.URI;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.ThreadPoolExecutor;
-
-/**
- * A "noisy" implementation of a {@link BrokerFilter} which logs its transactions.
- *
- * @author Alex Leigh
- * @since mk18 (GIPSY DANGER)
- */
-public class NoisyBrokerFilter implements Broker {
- private static final MechaLogger logger = MechaLoggerFactory.getLogger(NoisyBrokerFilter.class);
- protected final Broker next;
-
- public NoisyBrokerFilter(Broker next) {
- this.next = next;
- }
-
- public Broker getNext() {
- return next;
- }
-
- @Override
- public Broker getAdaptor(Class<?> type) {
- return type.isInstance(this) ? this : getNext().getAdaptor(type);
- }
-
- @Override
- public Map<ActiveMQDestination, Destination> getDestinationMap() {
- return getNext().getDestinationMap();
- }
-
- @Override
- public Map<ActiveMQDestination, Destination> getDestinationMap(ActiveMQDestination destination) {
- return getNext().getDestinationMap(destination);
- }
-
- @Override
- public Set<Destination> getDestinations(ActiveMQDestination destination) {
- return getNext().getDestinations(destination);
- }
-
- @Override
- public void acknowledge(ConsumerBrokerExchange consumerExchange, MessageAck ack) throws Exception {
- logger.info("acknowledge()");
- getNext().acknowledge(consumerExchange, ack);
- }
-
- @Override
- public Response messagePull(ConnectionContext context, MessagePull pull) throws Exception {
- logger.info("messagePull()");
- return getNext().messagePull(context, pull);
- }
-
- @Override
- public void addConnection(ConnectionContext context, ConnectionInfo info) throws Exception {
- logger.info("addConnection()");
- getNext().addConnection(context, info);
- }
-
- @Override
- public Subscription addConsumer(ConnectionContext context, ConsumerInfo info) throws Exception {
- logger.info("addConsumer()");
- return getNext().addConsumer(context, info);
- }
-
- @Override
- public void addProducer(ConnectionContext context, ProducerInfo info) throws Exception {
- logger.info("addProducer()");
- getNext().addProducer(context, info);
- }
-
- @Override
- public void commitTransaction(ConnectionContext context, TransactionId xid, boolean onePhase) throws Exception {
- logger.info("commitTransaction()");
- getNext().commitTransaction(context, xid, onePhase);
- }
-
- @Override
- public void removeSubscription(ConnectionContext context, RemoveSubscriptionInfo info) throws Exception {
- logger.info("removeSubscription()");
- getNext().removeSubscription(context, info);
- }
-
- @Override
- public TransactionId[] getPreparedTransactions(ConnectionContext context) throws Exception {
- logger.info("getPreparedTransactions()");
- return getNext().getPreparedTransactions(context);
- }
-
- @Override
- public int prepareTransaction(ConnectionContext context, TransactionId xid) throws Exception {
- logger.info("prepareTransaction()");
- return getNext().prepareTransaction(context, xid);
- }
-
- @Override
- public void removeConnection(ConnectionContext context, ConnectionInfo info, Throwable error) throws Exception {
- logger.info("removeConnection()");
- getNext().removeConnection(context, info, error);
- }
-
- @Override
- public void removeConsumer(ConnectionContext context, ConsumerInfo info) throws Exception {
- logger.info("removeConsumer()");
- getNext().removeConsumer(context, info);
- }
-
- @Override
- public void removeProducer(ConnectionContext context, ProducerInfo info) throws Exception {
- logger.info("removeProducer()");
- getNext().removeProducer(context, info);
- }
-
- @Override
- public void rollbackTransaction(ConnectionContext context, TransactionId xid) throws Exception {
- logger.info("rollbackTransaction()");
- getNext().rollbackTransaction(context, xid);
- }
-
- @Override
- public void send(ProducerBrokerExchange producerExchange, Message messageSend) throws Exception {
- logger.info("send()");
- getNext().send(producerExchange, messageSend);
- }
-
- @Override
- public void beginTransaction(ConnectionContext context, TransactionId xid) throws Exception {
- logger.info("beginTransaction()");
- getNext().beginTransaction(context, xid);
- }
-
- @Override
- public void forgetTransaction(ConnectionContext context, TransactionId transactionId) throws Exception {
- logger.info("forgetTransaction()");
- getNext().forgetTransaction(context, transactionId);
- }
-
- @Override
- public Connection[] getClients() throws Exception {
- logger.info("getClients()");
- return getNext().getClients();
- }
-
- @Override
- public Destination addDestination(ConnectionContext context, ActiveMQDestination destination, boolean createIfTemporary) throws Exception {
- logger.info("addDestination()");
- return getNext().addDestination(context, destination, createIfTemporary);
- }
-
- @Override
- public void removeDestination(ConnectionContext context, ActiveMQDestination destination, long timeout) throws Exception {
- logger.info("acknowledge()");
- getNext().removeDestination(context, destination, timeout);
- }
-
- @Override
- public ActiveMQDestination[] getDestinations() throws Exception {
- logger.info("getDestinations()");
- return getNext().getDestinations();
- }
-
- @Override
- public void start() throws Exception {
- getNext().start();
- }
-
- @Override
- public void stop() throws Exception {
- getNext().stop();
- }
-
- @Override
- public void addSession(ConnectionContext context, SessionInfo info) throws Exception {
- logger.info("addSession()");
- getNext().addSession(context, info);
- }
-
- @Override
- public void removeSession(ConnectionContext context, SessionInfo info) throws Exception {
- logger.info("removeSession()");
- getNext().removeSession(context, info);
- }
-
- @Override
- public BrokerId getBrokerId() {
- return getNext().getBrokerId();
- }
-
- @Override
- public String getBrokerName() {
- return getNext().getBrokerName();
- }
-
- @Override
- public void gc() {
- getNext().gc();
- }
-
- @Override
- public void addBroker(Connection connection, BrokerInfo info) {
- getNext().addBroker(connection, info);
- }
-
- @Override
- public void removeBroker(Connection connection, BrokerInfo info) {
- getNext().removeBroker(connection, info);
- }
-
- @Override
- public BrokerInfo[] getPeerBrokerInfos() {
- return getNext().getPeerBrokerInfos();
- }
-
- @Override
- public void preProcessDispatch(MessageDispatch messageDispatch) {
- getNext().preProcessDispatch(messageDispatch);
- }
-
- @Override
- public void postProcessDispatch(MessageDispatch messageDispatch) {
- getNext().postProcessDispatch(messageDispatch);
- }
-
- @Override
- public void processDispatchNotification(MessageDispatchNotification messageDispatchNotification) throws Exception {
- getNext().processDispatchNotification(messageDispatchNotification);
- }
-
- @Override
- public boolean isStopped() {
- return getNext().isStopped();
- }
-
- @Override
- public Set<ActiveMQDestination> getDurableDestinations() {
- return getNext().getDurableDestinations();
- }
-
- @Override
- public void addDestinationInfo(ConnectionContext context, DestinationInfo info) throws Exception {
- getNext().addDestinationInfo(context, info);
- }
-
- @Override
- public void removeDestinationInfo(ConnectionContext context, DestinationInfo info) throws Exception {
- getNext().removeDestinationInfo(context, info);
- }
-
- @Override
- public boolean isFaultTolerantConfiguration() {
- return getNext().isFaultTolerantConfiguration();
- }
-
- @Override
- public ConnectionContext getAdminConnectionContext() {
- return getNext().getAdminConnectionContext();
- }
-
- @Override
- public void setAdminConnectionContext(ConnectionContext adminConnectionContext) {
- getNext().setAdminConnectionContext(adminConnectionContext);
- }
-
- @Override
- public PListStore getTempDataStore() {
- return getNext().getTempDataStore();
- }
-
- @Override
- public URI getVmConnectorURI() {
- return getNext().getVmConnectorURI();
- }
-
- @Override
- public void brokerServiceStarted() {
- getNext().brokerServiceStarted();
- }
-
- @Override
- public BrokerService getBrokerService() {
- return getNext().getBrokerService();
- }
-
- @Override
- public boolean isExpired(MessageReference messageReference) {
- return getNext().isExpired(messageReference);
- }
-
- @Override
- public void messageExpired(ConnectionContext context, MessageReference message, Subscription subscription) {
- getNext().messageExpired(context, message, subscription);
- }
-
- @Override
- public boolean sendToDeadLetterQueue(ConnectionContext context, MessageReference messageReference,
- Subscription subscription, Throwable poisonCause) {
- return getNext().sendToDeadLetterQueue(context, messageReference, subscription, poisonCause);
- }
-
- @Override
- public Broker getRoot() {
- return getNext().getRoot();
- }
-
- @Override
- public long getBrokerSequenceId() {
- return getNext().getBrokerSequenceId();
- }
-
-
- @Override
- public void fastProducer(ConnectionContext context, ProducerInfo producerInfo, ActiveMQDestination destination) {
- getNext().fastProducer(context, producerInfo, destination);
- }
-
- @Override
- public void isFull(ConnectionContext context, Destination destination, Usage<?> usage) {
- logger.info("isFull()");
- getNext().isFull(context, destination, usage);
- }
-
- @Override
- public void messageConsumed(ConnectionContext context, MessageReference messageReference) {
- logger.info("messageConsumed()");
- getNext().messageConsumed(context, messageReference);
- }
-
- @Override
- public void messageDelivered(ConnectionContext context, MessageReference messageReference) {
- logger.info("messageDelivered()");
- getNext().messageDelivered(context, messageReference);
- }
-
- @Override
- public void messageDiscarded(ConnectionContext context, Subscription sub, MessageReference messageReference) {
- logger.info("messageDiscarded()");
- getNext().messageDiscarded(context, sub, messageReference);
- }
-
- @Override
- public void slowConsumer(ConnectionContext context, Destination destination, Subscription subs) {
- logger.info("slowConsumer()");
- getNext().slowConsumer(context, destination, subs);
- }
-
- @Override
- public void virtualDestinationAdded(ConnectionContext context,
- VirtualDestination virtualDestination) {
- logger.info("virtualDestinationAdded()");
- getNext().virtualDestinationAdded(context, virtualDestination);
- }
-
- @Override
- public void virtualDestinationRemoved(ConnectionContext context,
- VirtualDestination virtualDestination) {
- logger.info("virtualDestinationRemoved()");
- getNext().virtualDestinationRemoved(context, virtualDestination);
- }
-
- @Override
- public void nowMasterBroker() {
- getNext().nowMasterBroker();
- }
-
- @Override
- public void processConsumerControl(ConsumerBrokerExchange consumerExchange,
- ConsumerControl control) {
- logger.info("processConsumerControl()");
- getNext().processConsumerControl(consumerExchange, control);
- }
-
- @Override
- public void reapplyInterceptor() {
- getNext().reapplyInterceptor();
- }
-
- @Override
- public Scheduler getScheduler() {
- return getNext().getScheduler();
- }
-
- @Override
- public ThreadPoolExecutor getExecutor() {
- return getNext().getExecutor();
- }
-
- @Override
- public void networkBridgeStarted(BrokerInfo brokerInfo, boolean createdByDuplex, String remoteIp) {
- logger.info("networkBridgeStarted()");
- getNext().networkBridgeStarted(brokerInfo, createdByDuplex, remoteIp);
- }
-
- @Override
- public void networkBridgeStopped(BrokerInfo brokerInfo) {
- getNext().networkBridgeStopped(brokerInfo);
- }
-}
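NoisyBrokerFilter is not wired into EngineService anywhere in this diff. If it were, the natural way to enable it would be a BrokerPlugin mirroring EnhancedBrokerPlugin; a minimal sketch follows (the `NoisyBrokerPlugin` class is hypothetical, not part of the original source):

```java
package lc.esp.engine.service;

import org.apache.activemq.broker.Broker;
import org.apache.activemq.broker.BrokerPlugin;

// Hypothetical companion plugin, mirroring EnhancedBrokerPlugin.
public class NoisyBrokerPlugin implements BrokerPlugin {
    @Override
    public Broker installPlugin(Broker broker) throws Exception {
        return new NoisyBrokerFilter(broker);
    }
}
```

It could then be added alongside EnhancedBrokerPlugin in the array passed to `broker.setPlugins(...)` in EngineService.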
diff --git a/lc-esp-monitor-app/.gitignore b/lc-esp-monitor-app/.gitignore
deleted file mode 100644
index 94143827ed065ca0d7d5be1b765d255c5c32cd9a..0000000000000000000000000000000000000000
--- a/lc-esp-monitor-app/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-Dockerfile
diff --git a/lc-esp-monitor-app/build.gradle b/lc-esp-monitor-app/build.gradle
deleted file mode 100644
index a16efff32a763d468309d55662c2e349cb4b3b29..0000000000000000000000000000000000000000
--- a/lc-esp-monitor-app/build.gradle
+++ /dev/null
@@ -1,49 +0,0 @@
-plugins {
- id 'java'
- id 'application'
- id "com.palantir.docker" version "0.33.0"
-}
-
-sourceCompatibility = JavaVersion.VERSION_17
-targetCompatibility = JavaVersion.VERSION_17
-
-group 'leighco'
-version '1.7'
-
-repositories {
- mavenCentral()
-}
-
-dependencies {
- implementation project(':lc-esp-sdk')
- testImplementation 'org.junit.jupiter:junit-jupiter-api:5.7.0'
- testRuntimeOnly 'org.junit.jupiter:junit-jupiter-engine:5.7.0'
-}
-
-test {
- useJUnitPlatform()
-}
-
-application {
- mainClass = 'lc.esp.monitor.app.MonitorApp'
-}
-
-
-docker {
- name "docker.leigh-co.com/${project.name}:${project.version}"
- files tasks.distTar.outputs
-}
-
-task dockerfile() {
- doLast {
- new File(projectDir, 'Dockerfile').text = """FROM docker.leigh-co.com/openjdk:17-alpine
-RUN mkdir /app
-COPY ${project.name}-${project.version}.tar /dist.tar
-RUN tar xfv /dist.tar
-RUN rm /dist.tar
-ENTRYPOINT ["/${project.name}-${project.version}/bin/${project.name}"]
-"""
- }
-}
-
-
diff --git a/lc-esp-monitor-app/src/main/java/lc/esp/monitor/app/MonitorApp.java b/lc-esp-monitor-app/src/main/java/lc/esp/monitor/app/MonitorApp.java
deleted file mode 100644
index 2bb5cc13fc1fc79561616b56003169cfcc2a8857..0000000000000000000000000000000000000000
--- a/lc-esp-monitor-app/src/main/java/lc/esp/monitor/app/MonitorApp.java
+++ /dev/null
@@ -1,36 +0,0 @@
-package lc.esp.monitor.app;
-
-import lc.esp.sdk.*;
-import lc.mecha.log.MechaLogger;
-import lc.mecha.log.MechaLoggerFactory;
-import lc.mecha.util.UniversalJob;
-import org.apache.commons.lang.StringUtils;
-
-public class MonitorApp {
- private static final MechaLogger logger = MechaLoggerFactory.getLogger(MonitorApp.class);
- public static final String ENV_ADDRESS = "LC_ADDRESS";
-
- public static void main(String[] args) throws Exception {
- String address = System.getenv(ENV_ADDRESS);
- if (StringUtils.isEmpty(address)) {
- logger.info(ENV_ADDRESS + " environment variable must be set.");
- System.exit(UniversalJob.RET_BADARGS);
- }
-
- ESPClient esp = new ESPClient();
- esp.start();
-
- ESPAddress telemDest = new ESPAddress(address);
-
- logger.info("Reading from address: {}", telemDest);
-
- ESPSession session = esp.createSession();
- ESPConsumer consumer = session.createConsumer(telemDest);
-
- //noinspection InfiniteLoopStatement
- while (true) {
- ESPMessage msg = consumer.receive();
- System.out.println(msg.toJson());
- }
- }
-}
diff --git a/lc-esp-sdk-js/.babelrc b/lc-esp-sdk-js/.babelrc
deleted file mode 100644
index 5a29d463fff5c155c9a3ef90c0a52e6f70b1bebd..0000000000000000000000000000000000000000
--- a/lc-esp-sdk-js/.babelrc
+++ /dev/null
@@ -1,16 +0,0 @@
-{
- "presets": [
- [
- "@babel/preset-env",
- {
- "targets": {
- "browsers": [
- "last 2 versions",
- "safari >= 7"
- ]
- },
- "modules": false
- }
- ]
- ]
-}
diff --git a/lc-esp-sdk-js/.gitignore b/lc-esp-sdk-js/.gitignore
deleted file mode 100644
index 30b172713759f7513bd7234fa95ceedd622c0c9e..0000000000000000000000000000000000000000
--- a/lc-esp-sdk-js/.gitignore
+++ /dev/null
@@ -1,2 +0,0 @@
-dist/*js
-doc
diff --git a/lc-esp-sdk-js/build.sh b/lc-esp-sdk-js/build.sh
deleted file mode 100755
index f70aaeeb0346efe6e3c6195c9efedc7922fb165e..0000000000000000000000000000000000000000
--- a/lc-esp-sdk-js/build.sh
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/sh -x
-
-npm run build
-#npx webpack --mode production
-npx webpack --mode development
-documentation build src/index.js -f html -o doc
diff --git a/lc-esp-sdk-js/dist/test.html b/lc-esp-sdk-js/dist/test.html
deleted file mode 100644
index 83cd5d81fd076ebb5e8b00386f92065a83189a5d..0000000000000000000000000000000000000000
--- a/lc-esp-sdk-js/dist/test.html
+++ /dev/null
@@ -1,16 +0,0 @@
-<html>
-<head>
- <title>Test</title>
-</head>
-<body>
-ESP SDK Test. To use: View JS console.
-</body>
-</html>
diff --git a/lc-esp-sdk-js/package-lock.json b/lc-esp-sdk-js/package-lock.json
deleted file mode 100644
index 82918a13d9edc93ee21a929606c31c4db601f1b4..0000000000000000000000000000000000000000
--- a/lc-esp-sdk-js/package-lock.json
+++ /dev/null
@@ -1,7366 +0,0 @@
-{
- "name": "lc-esp-sdk-js",
- "version": "18.0.0",
- "lockfileVersion": 2,
- "requires": true,
- "packages": {
- "": {
- "name": "lc-esp-sdk-js",
- "version": "18.0.0",
- "license": "LGPL-3.0-only",
- "dependencies": {
- "@stomp/stompjs": "^6.1.2",
- "crypto-browserify": "^3.12.0",
- "lodash": "^4.17.21",
- "uuid": "^8.3.2",
- "websocket": "^1.0.34"
- },
- "devDependencies": {
- "@babel/core": "^7.12.10",
- "@babel/preset-env": "^7.12.11",
- "@rollup/plugin-node-resolve": "^13.2.1",
- "rollup": "^2.70.2",
- "rollup-plugin-babel": "^4.4.0",
- "rollup-plugin-commonjs": "^10.1.0",
- "rollup-plugin-node-resolve": "^5.2.0",
- "webpack": "^5.73.0",
- "webpack-cli": "^4.10.0"
- }
- },
- "node_modules/@ampproject/remapping": {
- "version": "2.2.0",
- "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.2.0.tgz",
- "integrity": "sha512-qRmjj8nj9qmLTQXXmaR1cck3UXSRMPrbsLJAasZpF+t3riI71BXed5ebIOYwQntykeZuhjsdweEc9BxH5Jc26w==",
- "dev": true,
- "dependencies": {
- "@jridgewell/gen-mapping": "^0.1.0",
- "@jridgewell/trace-mapping": "^0.3.9"
- },
- "engines": {
- "node": ">=6.0.0"
- }
- },
- "node_modules/@babel/code-frame": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.16.7.tgz",
- "integrity": "sha512-iAXqUn8IIeBTNd72xsFlgaXHkMBMt6y4HJp1tIaK465CWLT/fG1aqB7ykr95gHHmlBdGbFeWWfyB4NJJ0nmeIg==",
- "dev": true,
- "dependencies": {
- "@babel/highlight": "^7.16.7"
- },
- "engines": {
- "node": ">=6.9.0"
- }
- },
- "node_modules/@babel/compat-data": {
- "version": "7.17.7",
- "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.17.7.tgz",
- "integrity": "sha512-p8pdE6j0a29TNGebNm7NzYZWB3xVZJBZ7XGs42uAKzQo8VQ3F0By/cQCtUEABwIqw5zo6WA4NbmxsfzADzMKnQ==",
- "dev": true,
- "engines": {
- "node": ">=6.9.0"
- }
- },
- "node_modules/@babel/core": {
- "version": "7.17.9",
- "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.17.9.tgz",
- "integrity": "sha512-5ug+SfZCpDAkVp9SFIZAzlW18rlzsOcJGaetCjkySnrXXDUw9AR8cDUm1iByTmdWM6yxX6/zycaV76w3YTF2gw==",
- "dev": true,
- "dependencies": {
- "@ampproject/remapping": "^2.1.0",
- "@babel/code-frame": "^7.16.7",
- "@babel/generator": "^7.17.9",
- "@babel/helper-compilation-targets": "^7.17.7",
- "@babel/helper-module-transforms": "^7.17.7",
- "@babel/helpers": "^7.17.9",
- "@babel/parser": "^7.17.9",
- "@babel/template": "^7.16.7",
- "@babel/traverse": "^7.17.9",
- "@babel/types": "^7.17.0",
- "convert-source-map": "^1.7.0",
- "debug": "^4.1.0",
- "gensync": "^1.0.0-beta.2",
- "json5": "^2.2.1",
- "semver": "^6.3.0"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/babel"
- }
- },
- "node_modules/@babel/generator": {
- "version": "7.17.9",
- "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.17.9.tgz",
- "integrity": "sha512-rAdDousTwxbIxbz5I7GEQ3lUip+xVCXooZNbsydCWs3xA7ZsYOv+CFRdzGxRX78BmQHu9B1Eso59AOZQOJDEdQ==",
- "dev": true,
- "dependencies": {
- "@babel/types": "^7.17.0",
- "jsesc": "^2.5.1",
- "source-map": "^0.5.0"
- },
- "engines": {
- "node": ">=6.9.0"
- }
- },
- "node_modules/@babel/helper-annotate-as-pure": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.16.7.tgz",
- "integrity": "sha512-s6t2w/IPQVTAET1HitoowRGXooX8mCgtuP5195wD/QJPV6wYjpujCGF7JuMODVX2ZAJOf1GT6DT9MHEZvLOFSw==",
- "dev": true,
- "dependencies": {
- "@babel/types": "^7.16.7"
- },
- "engines": {
- "node": ">=6.9.0"
- }
- },
- "node_modules/@babel/helper-builder-binary-assignment-operator-visitor": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.16.7.tgz",
- "integrity": "sha512-C6FdbRaxYjwVu/geKW4ZeQ0Q31AftgRcdSnZ5/jsH6BzCJbtvXvhpfkbkThYSuutZA7nCXpPR6AD9zd1dprMkA==",
- "dev": true,
- "dependencies": {
- "@babel/helper-explode-assignable-expression": "^7.16.7",
- "@babel/types": "^7.16.7"
- },
- "engines": {
- "node": ">=6.9.0"
- }
- },
- "node_modules/@babel/helper-compilation-targets": {
- "version": "7.17.7",
- "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.17.7.tgz",
- "integrity": "sha512-UFzlz2jjd8kroj0hmCFV5zr+tQPi1dpC2cRsDV/3IEW8bJfCPrPpmcSN6ZS8RqIq4LXcmpipCQFPddyFA5Yc7w==",
- "dev": true,
- "dependencies": {
- "@babel/compat-data": "^7.17.7",
- "@babel/helper-validator-option": "^7.16.7",
- "browserslist": "^4.17.5",
- "semver": "^6.3.0"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0"
- }
- },
- "node_modules/@babel/helper-create-class-features-plugin": {
- "version": "7.17.9",
- "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.17.9.tgz",
- "integrity": "sha512-kUjip3gruz6AJKOq5i3nC6CoCEEF/oHH3cp6tOZhB+IyyyPyW0g1Gfsxn3mkk6S08pIA2y8GQh609v9G/5sHVQ==",
- "dev": true,
- "dependencies": {
- "@babel/helper-annotate-as-pure": "^7.16.7",
- "@babel/helper-environment-visitor": "^7.16.7",
- "@babel/helper-function-name": "^7.17.9",
- "@babel/helper-member-expression-to-functions": "^7.17.7",
- "@babel/helper-optimise-call-expression": "^7.16.7",
- "@babel/helper-replace-supers": "^7.16.7",
- "@babel/helper-split-export-declaration": "^7.16.7"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0"
- }
- },
- "node_modules/@babel/helper-create-regexp-features-plugin": {
- "version": "7.17.0",
- "resolved": "https://registry.npmjs.org/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.17.0.tgz",
- "integrity": "sha512-awO2So99wG6KnlE+TPs6rn83gCz5WlEePJDTnLEqbchMVrBeAujURVphRdigsk094VhvZehFoNOihSlcBjwsXA==",
- "dev": true,
- "dependencies": {
- "@babel/helper-annotate-as-pure": "^7.16.7",
- "regexpu-core": "^5.0.1"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0"
- }
- },
- "node_modules/@babel/helper-define-polyfill-provider": {
- "version": "0.3.1",
- "resolved": "https://registry.npmjs.org/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.3.1.tgz",
- "integrity": "sha512-J9hGMpJQmtWmj46B3kBHmL38UhJGhYX7eqkcq+2gsstyYt341HmPeWspihX43yVRA0mS+8GGk2Gckc7bY/HCmA==",
- "dev": true,
- "dependencies": {
- "@babel/helper-compilation-targets": "^7.13.0",
- "@babel/helper-module-imports": "^7.12.13",
- "@babel/helper-plugin-utils": "^7.13.0",
- "@babel/traverse": "^7.13.0",
- "debug": "^4.1.1",
- "lodash.debounce": "^4.0.8",
- "resolve": "^1.14.2",
- "semver": "^6.1.2"
- },
- "peerDependencies": {
- "@babel/core": "^7.4.0-0"
- }
- },
- "node_modules/@babel/helper-environment-visitor": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.16.7.tgz",
- "integrity": "sha512-SLLb0AAn6PkUeAfKJCCOl9e1R53pQlGAfc4y4XuMRZfqeMYLE0dM1LMhqbGAlGQY0lfw5/ohoYWAe9V1yibRag==",
- "dev": true,
- "dependencies": {
- "@babel/types": "^7.16.7"
- },
- "engines": {
- "node": ">=6.9.0"
- }
- },
- "node_modules/@babel/helper-explode-assignable-expression": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/helper-explode-assignable-expression/-/helper-explode-assignable-expression-7.16.7.tgz",
- "integrity": "sha512-KyUenhWMC8VrxzkGP0Jizjo4/Zx+1nNZhgocs+gLzyZyB8SHidhoq9KK/8Ato4anhwsivfkBLftky7gvzbZMtQ==",
- "dev": true,
- "dependencies": {
- "@babel/types": "^7.16.7"
- },
- "engines": {
- "node": ">=6.9.0"
- }
- },
- "node_modules/@babel/helper-function-name": {
- "version": "7.17.9",
- "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.17.9.tgz",
- "integrity": "sha512-7cRisGlVtiVqZ0MW0/yFB4atgpGLWEHUVYnb448hZK4x+vih0YO5UoS11XIYtZYqHd0dIPMdUSv8q5K4LdMnIg==",
- "dev": true,
- "dependencies": {
- "@babel/template": "^7.16.7",
- "@babel/types": "^7.17.0"
- },
- "engines": {
- "node": ">=6.9.0"
- }
- },
- "node_modules/@babel/helper-hoist-variables": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.16.7.tgz",
- "integrity": "sha512-m04d/0Op34H5v7pbZw6pSKP7weA6lsMvfiIAMeIvkY/R4xQtBSMFEigu9QTZ2qB/9l22vsxtM8a+Q8CzD255fg==",
- "dev": true,
- "dependencies": {
- "@babel/types": "^7.16.7"
- },
- "engines": {
- "node": ">=6.9.0"
- }
- },
- "node_modules/@babel/helper-member-expression-to-functions": {
- "version": "7.17.7",
- "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.17.7.tgz",
- "integrity": "sha512-thxXgnQ8qQ11W2wVUObIqDL4p148VMxkt5T/qpN5k2fboRyzFGFmKsTGViquyM5QHKUy48OZoca8kw4ajaDPyw==",
- "dev": true,
- "dependencies": {
- "@babel/types": "^7.17.0"
- },
- "engines": {
- "node": ">=6.9.0"
- }
- },
- "node_modules/@babel/helper-module-imports": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.16.7.tgz",
- "integrity": "sha512-LVtS6TqjJHFc+nYeITRo6VLXve70xmq7wPhWTqDJusJEgGmkAACWwMiTNrvfoQo6hEhFwAIixNkvB0jPXDL8Wg==",
- "dev": true,
- "dependencies": {
- "@babel/types": "^7.16.7"
- },
- "engines": {
- "node": ">=6.9.0"
- }
- },
- "node_modules/@babel/helper-module-transforms": {
- "version": "7.17.7",
- "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.17.7.tgz",
- "integrity": "sha512-VmZD99F3gNTYB7fJRDTi+u6l/zxY0BE6OIxPSU7a50s6ZUQkHwSDmV92FfM+oCG0pZRVojGYhkR8I0OGeCVREw==",
- "dev": true,
- "dependencies": {
- "@babel/helper-environment-visitor": "^7.16.7",
- "@babel/helper-module-imports": "^7.16.7",
- "@babel/helper-simple-access": "^7.17.7",
- "@babel/helper-split-export-declaration": "^7.16.7",
- "@babel/helper-validator-identifier": "^7.16.7",
- "@babel/template": "^7.16.7",
- "@babel/traverse": "^7.17.3",
- "@babel/types": "^7.17.0"
- },
- "engines": {
- "node": ">=6.9.0"
- }
- },
- "node_modules/@babel/helper-optimise-call-expression": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.16.7.tgz",
- "integrity": "sha512-EtgBhg7rd/JcnpZFXpBy0ze1YRfdm7BnBX4uKMBd3ixa3RGAE002JZB66FJyNH7g0F38U05pXmA5P8cBh7z+1w==",
- "dev": true,
- "dependencies": {
- "@babel/types": "^7.16.7"
- },
- "engines": {
- "node": ">=6.9.0"
- }
- },
- "node_modules/@babel/helper-plugin-utils": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.16.7.tgz",
- "integrity": "sha512-Qg3Nk7ZxpgMrsox6HreY1ZNKdBq7K72tDSliA6dCl5f007jR4ne8iD5UzuNnCJH2xBf2BEEVGr+/OL6Gdp7RxA==",
- "dev": true,
- "engines": {
- "node": ">=6.9.0"
- }
- },
- "node_modules/@babel/helper-remap-async-to-generator": {
- "version": "7.16.8",
- "resolved": "https://registry.npmjs.org/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.16.8.tgz",
- "integrity": "sha512-fm0gH7Flb8H51LqJHy3HJ3wnE1+qtYR2A99K06ahwrawLdOFsCEWjZOrYricXJHoPSudNKxrMBUPEIPxiIIvBw==",
- "dev": true,
- "dependencies": {
- "@babel/helper-annotate-as-pure": "^7.16.7",
- "@babel/helper-wrap-function": "^7.16.8",
- "@babel/types": "^7.16.8"
- },
- "engines": {
- "node": ">=6.9.0"
- }
- },
- "node_modules/@babel/helper-replace-supers": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.16.7.tgz",
- "integrity": "sha512-y9vsWilTNaVnVh6xiJfABzsNpgDPKev9HnAgz6Gb1p6UUwf9NepdlsV7VXGCftJM+jqD5f7JIEubcpLjZj5dBw==",
- "dev": true,
- "dependencies": {
- "@babel/helper-environment-visitor": "^7.16.7",
- "@babel/helper-member-expression-to-functions": "^7.16.7",
- "@babel/helper-optimise-call-expression": "^7.16.7",
- "@babel/traverse": "^7.16.7",
- "@babel/types": "^7.16.7"
- },
- "engines": {
- "node": ">=6.9.0"
- }
- },
- "node_modules/@babel/helper-simple-access": {
- "version": "7.17.7",
- "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.17.7.tgz",
- "integrity": "sha512-txyMCGroZ96i+Pxr3Je3lzEJjqwaRC9buMUgtomcrLe5Nd0+fk1h0LLA+ixUF5OW7AhHuQ7Es1WcQJZmZsz2XA==",
- "dev": true,
- "dependencies": {
- "@babel/types": "^7.17.0"
- },
- "engines": {
- "node": ">=6.9.0"
- }
- },
- "node_modules/@babel/helper-skip-transparent-expression-wrappers": {
- "version": "7.16.0",
- "resolved": "https://registry.npmjs.org/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.16.0.tgz",
- "integrity": "sha512-+il1gTy0oHwUsBQZyJvukbB4vPMdcYBrFHa0Uc4AizLxbq6BOYC51Rv4tWocX9BLBDLZ4kc6qUFpQ6HRgL+3zw==",
- "dev": true,
- "dependencies": {
- "@babel/types": "^7.16.0"
- },
- "engines": {
- "node": ">=6.9.0"
- }
- },
- "node_modules/@babel/helper-split-export-declaration": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.16.7.tgz",
- "integrity": "sha512-xbWoy/PFoxSWazIToT9Sif+jJTlrMcndIsaOKvTA6u7QEo7ilkRZpjew18/W3c7nm8fXdUDXh02VXTbZ0pGDNw==",
- "dev": true,
- "dependencies": {
- "@babel/types": "^7.16.7"
- },
- "engines": {
- "node": ">=6.9.0"
- }
- },
- "node_modules/@babel/helper-validator-identifier": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.16.7.tgz",
- "integrity": "sha512-hsEnFemeiW4D08A5gUAZxLBTXpZ39P+a+DGDsHw1yxqyQ/jzFEnxf5uTEGp+3bzAbNOxU1paTgYS4ECU/IgfDw==",
- "dev": true,
- "engines": {
- "node": ">=6.9.0"
- }
- },
- "node_modules/@babel/helper-validator-option": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.16.7.tgz",
- "integrity": "sha512-TRtenOuRUVo9oIQGPC5G9DgK4743cdxvtOw0weQNpZXaS16SCBi5MNjZF8vba3ETURjZpTbVn7Vvcf2eAwFozQ==",
- "dev": true,
- "engines": {
- "node": ">=6.9.0"
- }
- },
- "node_modules/@babel/helper-wrap-function": {
- "version": "7.16.8",
- "resolved": "https://registry.npmjs.org/@babel/helper-wrap-function/-/helper-wrap-function-7.16.8.tgz",
- "integrity": "sha512-8RpyRVIAW1RcDDGTA+GpPAwV22wXCfKOoM9bet6TLkGIFTkRQSkH1nMQ5Yet4MpoXe1ZwHPVtNasc2w0uZMqnw==",
- "dev": true,
- "dependencies": {
- "@babel/helper-function-name": "^7.16.7",
- "@babel/template": "^7.16.7",
- "@babel/traverse": "^7.16.8",
- "@babel/types": "^7.16.8"
- },
- "engines": {
- "node": ">=6.9.0"
- }
- },
- "node_modules/@babel/helpers": {
- "version": "7.17.9",
- "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.17.9.tgz",
- "integrity": "sha512-cPCt915ShDWUEzEp3+UNRktO2n6v49l5RSnG9M5pS24hA+2FAc5si+Pn1i4VVbQQ+jh+bIZhPFQOJOzbrOYY1Q==",
- "dev": true,
- "dependencies": {
- "@babel/template": "^7.16.7",
- "@babel/traverse": "^7.17.9",
- "@babel/types": "^7.17.0"
- },
- "engines": {
- "node": ">=6.9.0"
- }
- },
- "node_modules/@babel/highlight": {
- "version": "7.17.9",
- "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.17.9.tgz",
- "integrity": "sha512-J9PfEKCbFIv2X5bjTMiZu6Vf341N05QIY+d6FvVKynkG1S7G0j3I0QoRtWIrXhZ+/Nlb5Q0MzqL7TokEJ5BNHg==",
- "dev": true,
- "dependencies": {
- "@babel/helper-validator-identifier": "^7.16.7",
- "chalk": "^2.0.0",
- "js-tokens": "^4.0.0"
- },
- "engines": {
- "node": ">=6.9.0"
- }
- },
- "node_modules/@babel/parser": {
- "version": "7.17.9",
- "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.17.9.tgz",
- "integrity": "sha512-vqUSBLP8dQHFPdPi9bc5GK9vRkYHJ49fsZdtoJ8EQ8ibpwk5rPKfvNIwChB0KVXcIjcepEBBd2VHC5r9Gy8ueg==",
- "dev": true,
- "bin": {
- "parser": "bin/babel-parser.js"
- },
- "engines": {
- "node": ">=6.0.0"
- }
- },
- "node_modules/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.16.7.tgz",
- "integrity": "sha512-anv/DObl7waiGEnC24O9zqL0pSuI9hljihqiDuFHC8d7/bjr/4RLGPWuc8rYOff/QPzbEPSkzG8wGG9aDuhHRg==",
- "dev": true,
- "dependencies": {
- "@babel/helper-plugin-utils": "^7.16.7"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0"
- }
- },
- "node_modules/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.16.7.tgz",
- "integrity": "sha512-di8vUHRdf+4aJ7ltXhaDbPoszdkh59AQtJM5soLsuHpQJdFQZOA4uGj0V2u/CZ8bJ/u8ULDL5yq6FO/bCXnKHw==",
- "dev": true,
- "dependencies": {
- "@babel/helper-plugin-utils": "^7.16.7",
- "@babel/helper-skip-transparent-expression-wrappers": "^7.16.0",
- "@babel/plugin-proposal-optional-chaining": "^7.16.7"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.13.0"
- }
- },
- "node_modules/@babel/plugin-proposal-async-generator-functions": {
- "version": "7.16.8",
- "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.16.8.tgz",
- "integrity": "sha512-71YHIvMuiuqWJQkebWJtdhQTfd4Q4mF76q2IX37uZPkG9+olBxsX+rH1vkhFto4UeJZ9dPY2s+mDvhDm1u2BGQ==",
- "dev": true,
- "dependencies": {
- "@babel/helper-plugin-utils": "^7.16.7",
- "@babel/helper-remap-async-to-generator": "^7.16.8",
- "@babel/plugin-syntax-async-generators": "^7.8.4"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-proposal-class-properties": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.16.7.tgz",
- "integrity": "sha512-IobU0Xme31ewjYOShSIqd/ZGM/r/cuOz2z0MDbNrhF5FW+ZVgi0f2lyeoj9KFPDOAqsYxmLWZte1WOwlvY9aww==",
- "dev": true,
- "dependencies": {
- "@babel/helper-create-class-features-plugin": "^7.16.7",
- "@babel/helper-plugin-utils": "^7.16.7"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-proposal-class-static-block": {
- "version": "7.17.6",
- "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-class-static-block/-/plugin-proposal-class-static-block-7.17.6.tgz",
- "integrity": "sha512-X/tididvL2zbs7jZCeeRJ8167U/+Ac135AM6jCAx6gYXDUviZV5Ku9UDvWS2NCuWlFjIRXklYhwo6HhAC7ETnA==",
- "dev": true,
- "dependencies": {
- "@babel/helper-create-class-features-plugin": "^7.17.6",
- "@babel/helper-plugin-utils": "^7.16.7",
- "@babel/plugin-syntax-class-static-block": "^7.14.5"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.12.0"
- }
- },
- "node_modules/@babel/plugin-proposal-dynamic-import": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-dynamic-import/-/plugin-proposal-dynamic-import-7.16.7.tgz",
- "integrity": "sha512-I8SW9Ho3/8DRSdmDdH3gORdyUuYnk1m4cMxUAdu5oy4n3OfN8flDEH+d60iG7dUfi0KkYwSvoalHzzdRzpWHTg==",
- "dev": true,
- "dependencies": {
- "@babel/helper-plugin-utils": "^7.16.7",
- "@babel/plugin-syntax-dynamic-import": "^7.8.3"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-proposal-export-namespace-from": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-export-namespace-from/-/plugin-proposal-export-namespace-from-7.16.7.tgz",
- "integrity": "sha512-ZxdtqDXLRGBL64ocZcs7ovt71L3jhC1RGSyR996svrCi3PYqHNkb3SwPJCs8RIzD86s+WPpt2S73+EHCGO+NUA==",
- "dev": true,
- "dependencies": {
- "@babel/helper-plugin-utils": "^7.16.7",
- "@babel/plugin-syntax-export-namespace-from": "^7.8.3"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-proposal-json-strings": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-json-strings/-/plugin-proposal-json-strings-7.16.7.tgz",
- "integrity": "sha512-lNZ3EEggsGY78JavgbHsK9u5P3pQaW7k4axlgFLYkMd7UBsiNahCITShLjNQschPyjtO6dADrL24757IdhBrsQ==",
- "dev": true,
- "dependencies": {
- "@babel/helper-plugin-utils": "^7.16.7",
- "@babel/plugin-syntax-json-strings": "^7.8.3"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-proposal-logical-assignment-operators": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-logical-assignment-operators/-/plugin-proposal-logical-assignment-operators-7.16.7.tgz",
- "integrity": "sha512-K3XzyZJGQCr00+EtYtrDjmwX7o7PLK6U9bi1nCwkQioRFVUv6dJoxbQjtWVtP+bCPy82bONBKG8NPyQ4+i6yjg==",
- "dev": true,
- "dependencies": {
- "@babel/helper-plugin-utils": "^7.16.7",
- "@babel/plugin-syntax-logical-assignment-operators": "^7.10.4"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-proposal-nullish-coalescing-operator": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-nullish-coalescing-operator/-/plugin-proposal-nullish-coalescing-operator-7.16.7.tgz",
- "integrity": "sha512-aUOrYU3EVtjf62jQrCj63pYZ7k6vns2h/DQvHPWGmsJRYzWXZ6/AsfgpiRy6XiuIDADhJzP2Q9MwSMKauBQ+UQ==",
- "dev": true,
- "dependencies": {
- "@babel/helper-plugin-utils": "^7.16.7",
- "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-proposal-numeric-separator": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-numeric-separator/-/plugin-proposal-numeric-separator-7.16.7.tgz",
- "integrity": "sha512-vQgPMknOIgiuVqbokToyXbkY/OmmjAzr/0lhSIbG/KmnzXPGwW/AdhdKpi+O4X/VkWiWjnkKOBiqJrTaC98VKw==",
- "dev": true,
- "dependencies": {
- "@babel/helper-plugin-utils": "^7.16.7",
- "@babel/plugin-syntax-numeric-separator": "^7.10.4"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-proposal-object-rest-spread": {
- "version": "7.17.3",
- "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.17.3.tgz",
- "integrity": "sha512-yuL5iQA/TbZn+RGAfxQXfi7CNLmKi1f8zInn4IgobuCWcAb7i+zj4TYzQ9l8cEzVyJ89PDGuqxK1xZpUDISesw==",
- "dev": true,
- "dependencies": {
- "@babel/compat-data": "^7.17.0",
- "@babel/helper-compilation-targets": "^7.16.7",
- "@babel/helper-plugin-utils": "^7.16.7",
- "@babel/plugin-syntax-object-rest-spread": "^7.8.3",
- "@babel/plugin-transform-parameters": "^7.16.7"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-proposal-optional-catch-binding": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.16.7.tgz",
- "integrity": "sha512-eMOH/L4OvWSZAE1VkHbr1vckLG1WUcHGJSLqqQwl2GaUqG6QjddvrOaTUMNYiv77H5IKPMZ9U9P7EaHwvAShfA==",
- "dev": true,
- "dependencies": {
- "@babel/helper-plugin-utils": "^7.16.7",
- "@babel/plugin-syntax-optional-catch-binding": "^7.8.3"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-proposal-optional-chaining": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-optional-chaining/-/plugin-proposal-optional-chaining-7.16.7.tgz",
- "integrity": "sha512-eC3xy+ZrUcBtP7x+sq62Q/HYd674pPTb/77XZMb5wbDPGWIdUbSr4Agr052+zaUPSb+gGRnjxXfKFvx5iMJ+DA==",
- "dev": true,
- "dependencies": {
- "@babel/helper-plugin-utils": "^7.16.7",
- "@babel/helper-skip-transparent-expression-wrappers": "^7.16.0",
- "@babel/plugin-syntax-optional-chaining": "^7.8.3"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-proposal-private-methods": {
- "version": "7.16.11",
- "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-private-methods/-/plugin-proposal-private-methods-7.16.11.tgz",
- "integrity": "sha512-F/2uAkPlXDr8+BHpZvo19w3hLFKge+k75XUprE6jaqKxjGkSYcK+4c+bup5PdW/7W/Rpjwql7FTVEDW+fRAQsw==",
- "dev": true,
- "dependencies": {
- "@babel/helper-create-class-features-plugin": "^7.16.10",
- "@babel/helper-plugin-utils": "^7.16.7"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-proposal-private-property-in-object": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-private-property-in-object/-/plugin-proposal-private-property-in-object-7.16.7.tgz",
- "integrity": "sha512-rMQkjcOFbm+ufe3bTZLyOfsOUOxyvLXZJCTARhJr+8UMSoZmqTe1K1BgkFcrW37rAchWg57yI69ORxiWvUINuQ==",
- "dev": true,
- "dependencies": {
- "@babel/helper-annotate-as-pure": "^7.16.7",
- "@babel/helper-create-class-features-plugin": "^7.16.7",
- "@babel/helper-plugin-utils": "^7.16.7",
- "@babel/plugin-syntax-private-property-in-object": "^7.14.5"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-proposal-unicode-property-regex": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.16.7.tgz",
- "integrity": "sha512-QRK0YI/40VLhNVGIjRNAAQkEHws0cswSdFFjpFyt943YmJIU1da9uW63Iu6NFV6CxTZW5eTDCrwZUstBWgp/Rg==",
- "dev": true,
- "dependencies": {
- "@babel/helper-create-regexp-features-plugin": "^7.16.7",
- "@babel/helper-plugin-utils": "^7.16.7"
- },
- "engines": {
- "node": ">=4"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-syntax-async-generators": {
- "version": "7.8.4",
- "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz",
- "integrity": "sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw==",
- "dev": true,
- "dependencies": {
- "@babel/helper-plugin-utils": "^7.8.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-syntax-class-properties": {
- "version": "7.12.13",
- "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz",
- "integrity": "sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA==",
- "dev": true,
- "dependencies": {
- "@babel/helper-plugin-utils": "^7.12.13"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-syntax-class-static-block": {
- "version": "7.14.5",
- "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.14.5.tgz",
- "integrity": "sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw==",
- "dev": true,
- "dependencies": {
- "@babel/helper-plugin-utils": "^7.14.5"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-syntax-dynamic-import": {
- "version": "7.8.3",
- "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.8.3.tgz",
- "integrity": "sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ==",
- "dev": true,
- "dependencies": {
- "@babel/helper-plugin-utils": "^7.8.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-syntax-export-namespace-from": {
- "version": "7.8.3",
- "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-export-namespace-from/-/plugin-syntax-export-namespace-from-7.8.3.tgz",
- "integrity": "sha512-MXf5laXo6c1IbEbegDmzGPwGNTsHZmEy6QGznu5Sh2UCWvueywb2ee+CCE4zQiZstxU9BMoQO9i6zUFSY0Kj0Q==",
- "dev": true,
- "dependencies": {
- "@babel/helper-plugin-utils": "^7.8.3"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-syntax-json-strings": {
- "version": "7.8.3",
- "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz",
- "integrity": "sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA==",
- "dev": true,
- "dependencies": {
- "@babel/helper-plugin-utils": "^7.8.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-syntax-logical-assignment-operators": {
- "version": "7.10.4",
- "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz",
- "integrity": "sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig==",
- "dev": true,
- "dependencies": {
- "@babel/helper-plugin-utils": "^7.10.4"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-syntax-nullish-coalescing-operator": {
- "version": "7.8.3",
- "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz",
- "integrity": "sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ==",
- "dev": true,
- "dependencies": {
- "@babel/helper-plugin-utils": "^7.8.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-syntax-numeric-separator": {
- "version": "7.10.4",
- "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz",
- "integrity": "sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug==",
- "dev": true,
- "dependencies": {
- "@babel/helper-plugin-utils": "^7.10.4"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-syntax-object-rest-spread": {
- "version": "7.8.3",
- "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz",
- "integrity": "sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA==",
- "dev": true,
- "dependencies": {
- "@babel/helper-plugin-utils": "^7.8.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-syntax-optional-catch-binding": {
- "version": "7.8.3",
- "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz",
- "integrity": "sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q==",
- "dev": true,
- "dependencies": {
- "@babel/helper-plugin-utils": "^7.8.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-syntax-optional-chaining": {
- "version": "7.8.3",
- "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz",
- "integrity": "sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg==",
- "dev": true,
- "dependencies": {
- "@babel/helper-plugin-utils": "^7.8.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-syntax-private-property-in-object": {
- "version": "7.14.5",
- "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.5.tgz",
- "integrity": "sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg==",
- "dev": true,
- "dependencies": {
- "@babel/helper-plugin-utils": "^7.14.5"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-syntax-top-level-await": {
- "version": "7.14.5",
- "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz",
- "integrity": "sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw==",
- "dev": true,
- "dependencies": {
- "@babel/helper-plugin-utils": "^7.14.5"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-transform-arrow-functions": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.16.7.tgz",
- "integrity": "sha512-9ffkFFMbvzTvv+7dTp/66xvZAWASuPD5Tl9LK3Z9vhOmANo6j94rik+5YMBt4CwHVMWLWpMsriIc2zsa3WW3xQ==",
- "dev": true,
- "dependencies": {
- "@babel/helper-plugin-utils": "^7.16.7"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-transform-async-to-generator": {
- "version": "7.16.8",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.16.8.tgz",
- "integrity": "sha512-MtmUmTJQHCnyJVrScNzNlofQJ3dLFuobYn3mwOTKHnSCMtbNsqvF71GQmJfFjdrXSsAA7iysFmYWw4bXZ20hOg==",
- "dev": true,
- "dependencies": {
- "@babel/helper-module-imports": "^7.16.7",
- "@babel/helper-plugin-utils": "^7.16.7",
- "@babel/helper-remap-async-to-generator": "^7.16.8"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-transform-block-scoped-functions": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.16.7.tgz",
- "integrity": "sha512-JUuzlzmF40Z9cXyytcbZEZKckgrQzChbQJw/5PuEHYeqzCsvebDx0K0jWnIIVcmmDOAVctCgnYs0pMcrYj2zJg==",
- "dev": true,
- "dependencies": {
- "@babel/helper-plugin-utils": "^7.16.7"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-transform-block-scoping": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.16.7.tgz",
- "integrity": "sha512-ObZev2nxVAYA4bhyusELdo9hb3H+A56bxH3FZMbEImZFiEDYVHXQSJ1hQKFlDnlt8G9bBrCZ5ZpURZUrV4G5qQ==",
- "dev": true,
- "dependencies": {
- "@babel/helper-plugin-utils": "^7.16.7"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-transform-classes": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-classes/-/plugin-transform-classes-7.16.7.tgz",
- "integrity": "sha512-WY7og38SFAGYRe64BrjKf8OrE6ulEHtr5jEYaZMwox9KebgqPi67Zqz8K53EKk1fFEJgm96r32rkKZ3qA2nCWQ==",
- "dev": true,
- "dependencies": {
- "@babel/helper-annotate-as-pure": "^7.16.7",
- "@babel/helper-environment-visitor": "^7.16.7",
- "@babel/helper-function-name": "^7.16.7",
- "@babel/helper-optimise-call-expression": "^7.16.7",
- "@babel/helper-plugin-utils": "^7.16.7",
- "@babel/helper-replace-supers": "^7.16.7",
- "@babel/helper-split-export-declaration": "^7.16.7",
- "globals": "^11.1.0"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-transform-computed-properties": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.16.7.tgz",
- "integrity": "sha512-gN72G9bcmenVILj//sv1zLNaPyYcOzUho2lIJBMh/iakJ9ygCo/hEF9cpGb61SCMEDxbbyBoVQxrt+bWKu5KGw==",
- "dev": true,
- "dependencies": {
- "@babel/helper-plugin-utils": "^7.16.7"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-transform-destructuring": {
- "version": "7.17.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.17.7.tgz",
- "integrity": "sha512-XVh0r5yq9sLR4vZ6eVZe8FKfIcSgaTBxVBRSYokRj2qksf6QerYnTxz9/GTuKTH/n/HwLP7t6gtlybHetJ/6hQ==",
- "dev": true,
- "dependencies": {
- "@babel/helper-plugin-utils": "^7.16.7"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-transform-dotall-regex": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.16.7.tgz",
- "integrity": "sha512-Lyttaao2SjZF6Pf4vk1dVKv8YypMpomAbygW+mU5cYP3S5cWTfCJjG8xV6CFdzGFlfWK81IjL9viiTvpb6G7gQ==",
- "dev": true,
- "dependencies": {
- "@babel/helper-create-regexp-features-plugin": "^7.16.7",
- "@babel/helper-plugin-utils": "^7.16.7"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-transform-duplicate-keys": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.16.7.tgz",
- "integrity": "sha512-03DvpbRfvWIXyK0/6QiR1KMTWeT6OcQ7tbhjrXyFS02kjuX/mu5Bvnh5SDSWHxyawit2g5aWhKwI86EE7GUnTw==",
- "dev": true,
- "dependencies": {
- "@babel/helper-plugin-utils": "^7.16.7"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-transform-exponentiation-operator": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.16.7.tgz",
- "integrity": "sha512-8UYLSlyLgRixQvlYH3J2ekXFHDFLQutdy7FfFAMm3CPZ6q9wHCwnUyiXpQCe3gVVnQlHc5nsuiEVziteRNTXEA==",
- "dev": true,
- "dependencies": {
- "@babel/helper-builder-binary-assignment-operator-visitor": "^7.16.7",
- "@babel/helper-plugin-utils": "^7.16.7"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-transform-for-of": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.16.7.tgz",
- "integrity": "sha512-/QZm9W92Ptpw7sjI9Nx1mbcsWz33+l8kuMIQnDwgQBG5s3fAfQvkRjQ7NqXhtNcKOnPkdICmUHyCaWW06HCsqg==",
- "dev": true,
- "dependencies": {
- "@babel/helper-plugin-utils": "^7.16.7"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-transform-function-name": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.16.7.tgz",
- "integrity": "sha512-SU/C68YVwTRxqWj5kgsbKINakGag0KTgq9f2iZEXdStoAbOzLHEBRYzImmA6yFo8YZhJVflvXmIHUO7GWHmxxA==",
- "dev": true,
- "dependencies": {
- "@babel/helper-compilation-targets": "^7.16.7",
- "@babel/helper-function-name": "^7.16.7",
- "@babel/helper-plugin-utils": "^7.16.7"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-transform-literals": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-literals/-/plugin-transform-literals-7.16.7.tgz",
- "integrity": "sha512-6tH8RTpTWI0s2sV6uq3e/C9wPo4PTqqZps4uF0kzQ9/xPLFQtipynvmT1g/dOfEJ+0EQsHhkQ/zyRId8J2b8zQ==",
- "dev": true,
- "dependencies": {
- "@babel/helper-plugin-utils": "^7.16.7"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-transform-member-expression-literals": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.16.7.tgz",
- "integrity": "sha512-mBruRMbktKQwbxaJof32LT9KLy2f3gH+27a5XSuXo6h7R3vqltl0PgZ80C8ZMKw98Bf8bqt6BEVi3svOh2PzMw==",
- "dev": true,
- "dependencies": {
- "@babel/helper-plugin-utils": "^7.16.7"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-transform-modules-amd": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.16.7.tgz",
- "integrity": "sha512-KaaEtgBL7FKYwjJ/teH63oAmE3lP34N3kshz8mm4VMAw7U3PxjVwwUmxEFksbgsNUaO3wId9R2AVQYSEGRa2+g==",
- "dev": true,
- "dependencies": {
- "@babel/helper-module-transforms": "^7.16.7",
- "@babel/helper-plugin-utils": "^7.16.7",
- "babel-plugin-dynamic-import-node": "^2.3.3"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-transform-modules-commonjs": {
- "version": "7.17.9",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.17.9.tgz",
- "integrity": "sha512-2TBFd/r2I6VlYn0YRTz2JdazS+FoUuQ2rIFHoAxtyP/0G3D82SBLaRq9rnUkpqlLg03Byfl/+M32mpxjO6KaPw==",
- "dev": true,
- "dependencies": {
- "@babel/helper-module-transforms": "^7.17.7",
- "@babel/helper-plugin-utils": "^7.16.7",
- "@babel/helper-simple-access": "^7.17.7",
- "babel-plugin-dynamic-import-node": "^2.3.3"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-transform-modules-systemjs": {
- "version": "7.17.8",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.17.8.tgz",
- "integrity": "sha512-39reIkMTUVagzgA5x88zDYXPCMT6lcaRKs1+S9K6NKBPErbgO/w/kP8GlNQTC87b412ZTlmNgr3k2JrWgHH+Bw==",
- "dev": true,
- "dependencies": {
- "@babel/helper-hoist-variables": "^7.16.7",
- "@babel/helper-module-transforms": "^7.17.7",
- "@babel/helper-plugin-utils": "^7.16.7",
- "@babel/helper-validator-identifier": "^7.16.7",
- "babel-plugin-dynamic-import-node": "^2.3.3"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-transform-modules-umd": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.16.7.tgz",
- "integrity": "sha512-EMh7uolsC8O4xhudF2F6wedbSHm1HHZ0C6aJ7K67zcDNidMzVcxWdGr+htW9n21klm+bOn+Rx4CBsAntZd3rEQ==",
- "dev": true,
- "dependencies": {
- "@babel/helper-module-transforms": "^7.16.7",
- "@babel/helper-plugin-utils": "^7.16.7"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-transform-named-capturing-groups-regex": {
- "version": "7.16.8",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.16.8.tgz",
- "integrity": "sha512-j3Jw+n5PvpmhRR+mrgIh04puSANCk/T/UA3m3P1MjJkhlK906+ApHhDIqBQDdOgL/r1UYpz4GNclTXxyZrYGSw==",
- "dev": true,
- "dependencies": {
- "@babel/helper-create-regexp-features-plugin": "^7.16.7"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0"
- }
- },
- "node_modules/@babel/plugin-transform-new-target": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.16.7.tgz",
- "integrity": "sha512-xiLDzWNMfKoGOpc6t3U+etCE2yRnn3SM09BXqWPIZOBpL2gvVrBWUKnsJx0K/ADi5F5YC5f8APFfWrz25TdlGg==",
- "dev": true,
- "dependencies": {
- "@babel/helper-plugin-utils": "^7.16.7"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-transform-object-super": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.16.7.tgz",
- "integrity": "sha512-14J1feiQVWaGvRxj2WjyMuXS2jsBkgB3MdSN5HuC2G5nRspa5RK9COcs82Pwy5BuGcjb+fYaUj94mYcOj7rCvw==",
- "dev": true,
- "dependencies": {
- "@babel/helper-plugin-utils": "^7.16.7",
- "@babel/helper-replace-supers": "^7.16.7"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-transform-parameters": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.16.7.tgz",
- "integrity": "sha512-AT3MufQ7zZEhU2hwOA11axBnExW0Lszu4RL/tAlUJBuNoRak+wehQW8h6KcXOcgjY42fHtDxswuMhMjFEuv/aw==",
- "dev": true,
- "dependencies": {
- "@babel/helper-plugin-utils": "^7.16.7"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-transform-property-literals": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.16.7.tgz",
- "integrity": "sha512-z4FGr9NMGdoIl1RqavCqGG+ZuYjfZ/hkCIeuH6Do7tXmSm0ls11nYVSJqFEUOSJbDab5wC6lRE/w6YjVcr6Hqw==",
- "dev": true,
- "dependencies": {
- "@babel/helper-plugin-utils": "^7.16.7"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-transform-regenerator": {
- "version": "7.17.9",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.17.9.tgz",
- "integrity": "sha512-Lc2TfbxR1HOyn/c6b4Y/b6NHoTb67n/IoWLxTu4kC7h4KQnWlhCq2S8Tx0t2SVvv5Uu87Hs+6JEJ5kt2tYGylQ==",
- "dev": true,
- "dependencies": {
- "regenerator-transform": "^0.15.0"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-transform-reserved-words": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.16.7.tgz",
- "integrity": "sha512-KQzzDnZ9hWQBjwi5lpY5v9shmm6IVG0U9pB18zvMu2i4H90xpT4gmqwPYsn8rObiadYe2M0gmgsiOIF5A/2rtg==",
- "dev": true,
- "dependencies": {
- "@babel/helper-plugin-utils": "^7.16.7"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-transform-shorthand-properties": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.16.7.tgz",
- "integrity": "sha512-hah2+FEnoRoATdIb05IOXf+4GzXYTq75TVhIn1PewihbpyrNWUt2JbudKQOETWw6QpLe+AIUpJ5MVLYTQbeeUg==",
- "dev": true,
- "dependencies": {
- "@babel/helper-plugin-utils": "^7.16.7"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-transform-spread": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-spread/-/plugin-transform-spread-7.16.7.tgz",
- "integrity": "sha512-+pjJpgAngb53L0iaA5gU/1MLXJIfXcYepLgXB3esVRf4fqmj8f2cxM3/FKaHsZms08hFQJkFccEWuIpm429TXg==",
- "dev": true,
- "dependencies": {
- "@babel/helper-plugin-utils": "^7.16.7",
- "@babel/helper-skip-transparent-expression-wrappers": "^7.16.0"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-transform-sticky-regex": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.16.7.tgz",
- "integrity": "sha512-NJa0Bd/87QV5NZZzTuZG5BPJjLYadeSZ9fO6oOUoL4iQx+9EEuw/eEM92SrsT19Yc2jgB1u1hsjqDtH02c3Drw==",
- "dev": true,
- "dependencies": {
- "@babel/helper-plugin-utils": "^7.16.7"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-transform-template-literals": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.16.7.tgz",
- "integrity": "sha512-VwbkDDUeenlIjmfNeDX/V0aWrQH2QiVyJtwymVQSzItFDTpxfyJh3EVaQiS0rIN/CqbLGr0VcGmuwyTdZtdIsA==",
- "dev": true,
- "dependencies": {
- "@babel/helper-plugin-utils": "^7.16.7"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-transform-typeof-symbol": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.16.7.tgz",
- "integrity": "sha512-p2rOixCKRJzpg9JB4gjnG4gjWkWa89ZoYUnl9snJ1cWIcTH/hvxZqfO+WjG6T8DRBpctEol5jw1O5rA8gkCokQ==",
- "dev": true,
- "dependencies": {
- "@babel/helper-plugin-utils": "^7.16.7"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-transform-unicode-escapes": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.16.7.tgz",
- "integrity": "sha512-TAV5IGahIz3yZ9/Hfv35TV2xEm+kaBDaZQCn2S/hG9/CZ0DktxJv9eKfPc7yYCvOYR4JGx1h8C+jcSOvgaaI/Q==",
- "dev": true,
- "dependencies": {
- "@babel/helper-plugin-utils": "^7.16.7"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/plugin-transform-unicode-regex": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.16.7.tgz",
- "integrity": "sha512-oC5tYYKw56HO75KZVLQ+R/Nl3Hro9kf8iG0hXoaHP7tjAyCpvqBiSNe6vGrZni1Z6MggmUOC6A7VP7AVmw225Q==",
- "dev": true,
- "dependencies": {
- "@babel/helper-create-regexp-features-plugin": "^7.16.7",
- "@babel/helper-plugin-utils": "^7.16.7"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/preset-env": {
- "version": "7.16.11",
- "resolved": "https://registry.npmjs.org/@babel/preset-env/-/preset-env-7.16.11.tgz",
- "integrity": "sha512-qcmWG8R7ZW6WBRPZK//y+E3Cli151B20W1Rv7ln27vuPaXU/8TKms6jFdiJtF7UDTxcrb7mZd88tAeK9LjdT8g==",
- "dev": true,
- "dependencies": {
- "@babel/compat-data": "^7.16.8",
- "@babel/helper-compilation-targets": "^7.16.7",
- "@babel/helper-plugin-utils": "^7.16.7",
- "@babel/helper-validator-option": "^7.16.7",
- "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": "^7.16.7",
- "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": "^7.16.7",
- "@babel/plugin-proposal-async-generator-functions": "^7.16.8",
- "@babel/plugin-proposal-class-properties": "^7.16.7",
- "@babel/plugin-proposal-class-static-block": "^7.16.7",
- "@babel/plugin-proposal-dynamic-import": "^7.16.7",
- "@babel/plugin-proposal-export-namespace-from": "^7.16.7",
- "@babel/plugin-proposal-json-strings": "^7.16.7",
- "@babel/plugin-proposal-logical-assignment-operators": "^7.16.7",
- "@babel/plugin-proposal-nullish-coalescing-operator": "^7.16.7",
- "@babel/plugin-proposal-numeric-separator": "^7.16.7",
- "@babel/plugin-proposal-object-rest-spread": "^7.16.7",
- "@babel/plugin-proposal-optional-catch-binding": "^7.16.7",
- "@babel/plugin-proposal-optional-chaining": "^7.16.7",
- "@babel/plugin-proposal-private-methods": "^7.16.11",
- "@babel/plugin-proposal-private-property-in-object": "^7.16.7",
- "@babel/plugin-proposal-unicode-property-regex": "^7.16.7",
- "@babel/plugin-syntax-async-generators": "^7.8.4",
- "@babel/plugin-syntax-class-properties": "^7.12.13",
- "@babel/plugin-syntax-class-static-block": "^7.14.5",
- "@babel/plugin-syntax-dynamic-import": "^7.8.3",
- "@babel/plugin-syntax-export-namespace-from": "^7.8.3",
- "@babel/plugin-syntax-json-strings": "^7.8.3",
- "@babel/plugin-syntax-logical-assignment-operators": "^7.10.4",
- "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3",
- "@babel/plugin-syntax-numeric-separator": "^7.10.4",
- "@babel/plugin-syntax-object-rest-spread": "^7.8.3",
- "@babel/plugin-syntax-optional-catch-binding": "^7.8.3",
- "@babel/plugin-syntax-optional-chaining": "^7.8.3",
- "@babel/plugin-syntax-private-property-in-object": "^7.14.5",
- "@babel/plugin-syntax-top-level-await": "^7.14.5",
- "@babel/plugin-transform-arrow-functions": "^7.16.7",
- "@babel/plugin-transform-async-to-generator": "^7.16.8",
- "@babel/plugin-transform-block-scoped-functions": "^7.16.7",
- "@babel/plugin-transform-block-scoping": "^7.16.7",
- "@babel/plugin-transform-classes": "^7.16.7",
- "@babel/plugin-transform-computed-properties": "^7.16.7",
- "@babel/plugin-transform-destructuring": "^7.16.7",
- "@babel/plugin-transform-dotall-regex": "^7.16.7",
- "@babel/plugin-transform-duplicate-keys": "^7.16.7",
- "@babel/plugin-transform-exponentiation-operator": "^7.16.7",
- "@babel/plugin-transform-for-of": "^7.16.7",
- "@babel/plugin-transform-function-name": "^7.16.7",
- "@babel/plugin-transform-literals": "^7.16.7",
- "@babel/plugin-transform-member-expression-literals": "^7.16.7",
- "@babel/plugin-transform-modules-amd": "^7.16.7",
- "@babel/plugin-transform-modules-commonjs": "^7.16.8",
- "@babel/plugin-transform-modules-systemjs": "^7.16.7",
- "@babel/plugin-transform-modules-umd": "^7.16.7",
- "@babel/plugin-transform-named-capturing-groups-regex": "^7.16.8",
- "@babel/plugin-transform-new-target": "^7.16.7",
- "@babel/plugin-transform-object-super": "^7.16.7",
- "@babel/plugin-transform-parameters": "^7.16.7",
- "@babel/plugin-transform-property-literals": "^7.16.7",
- "@babel/plugin-transform-regenerator": "^7.16.7",
- "@babel/plugin-transform-reserved-words": "^7.16.7",
- "@babel/plugin-transform-shorthand-properties": "^7.16.7",
- "@babel/plugin-transform-spread": "^7.16.7",
- "@babel/plugin-transform-sticky-regex": "^7.16.7",
- "@babel/plugin-transform-template-literals": "^7.16.7",
- "@babel/plugin-transform-typeof-symbol": "^7.16.7",
- "@babel/plugin-transform-unicode-escapes": "^7.16.7",
- "@babel/plugin-transform-unicode-regex": "^7.16.7",
- "@babel/preset-modules": "^0.1.5",
- "@babel/types": "^7.16.8",
- "babel-plugin-polyfill-corejs2": "^0.3.0",
- "babel-plugin-polyfill-corejs3": "^0.5.0",
- "babel-plugin-polyfill-regenerator": "^0.3.0",
- "core-js-compat": "^3.20.2",
- "semver": "^6.3.0"
- },
- "engines": {
- "node": ">=6.9.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/preset-modules": {
- "version": "0.1.5",
- "resolved": "https://registry.npmjs.org/@babel/preset-modules/-/preset-modules-0.1.5.tgz",
- "integrity": "sha512-A57th6YRG7oR3cq/yt/Y84MvGgE0eJG2F1JLhKuyG+jFxEgrd/HAMJatiFtmOiZurz+0DkrvbheCLaV5f2JfjA==",
- "dev": true,
- "dependencies": {
- "@babel/helper-plugin-utils": "^7.0.0",
- "@babel/plugin-proposal-unicode-property-regex": "^7.4.4",
- "@babel/plugin-transform-dotall-regex": "^7.4.4",
- "@babel/types": "^7.4.4",
- "esutils": "^2.0.2"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/@babel/runtime": {
- "version": "7.17.9",
- "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.17.9.tgz",
- "integrity": "sha512-lSiBBvodq29uShpWGNbgFdKYNiFDo5/HIYsaCEY9ff4sb10x9jizo2+pRrSyF4jKZCXqgzuqBOQKbUm90gQwJg==",
- "dev": true,
- "dependencies": {
- "regenerator-runtime": "^0.13.4"
- },
- "engines": {
- "node": ">=6.9.0"
- }
- },
- "node_modules/@babel/template": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.16.7.tgz",
- "integrity": "sha512-I8j/x8kHUrbYRTUxXrrMbfCa7jxkE7tZre39x3kjr9hvI82cK1FfqLygotcWN5kdPGWcLdWMHpSBavse5tWw3w==",
- "dev": true,
- "dependencies": {
- "@babel/code-frame": "^7.16.7",
- "@babel/parser": "^7.16.7",
- "@babel/types": "^7.16.7"
- },
- "engines": {
- "node": ">=6.9.0"
- }
- },
- "node_modules/@babel/traverse": {
- "version": "7.17.9",
- "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.17.9.tgz",
- "integrity": "sha512-PQO8sDIJ8SIwipTPiR71kJQCKQYB5NGImbOviK8K+kg5xkNSYXLBupuX9QhatFowrsvo9Hj8WgArg3W7ijNAQw==",
- "dev": true,
- "dependencies": {
- "@babel/code-frame": "^7.16.7",
- "@babel/generator": "^7.17.9",
- "@babel/helper-environment-visitor": "^7.16.7",
- "@babel/helper-function-name": "^7.17.9",
- "@babel/helper-hoist-variables": "^7.16.7",
- "@babel/helper-split-export-declaration": "^7.16.7",
- "@babel/parser": "^7.17.9",
- "@babel/types": "^7.17.0",
- "debug": "^4.1.0",
- "globals": "^11.1.0"
- },
- "engines": {
- "node": ">=6.9.0"
- }
- },
- "node_modules/@babel/types": {
- "version": "7.17.0",
- "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.17.0.tgz",
- "integrity": "sha512-TmKSNO4D5rzhL5bjWFcVHHLETzfQ/AmbKpKPOSjlP0WoHZ6L911fgoOKY4Alp/emzG4cHJdyN49zpgkbXFEHHw==",
- "dev": true,
- "dependencies": {
- "@babel/helper-validator-identifier": "^7.16.7",
- "to-fast-properties": "^2.0.0"
- },
- "engines": {
- "node": ">=6.9.0"
- }
- },
- "node_modules/@discoveryjs/json-ext": {
- "version": "0.5.7",
- "resolved": "https://registry.npmjs.org/@discoveryjs/json-ext/-/json-ext-0.5.7.tgz",
- "integrity": "sha512-dBVuXR082gk3jsFp7Rd/JI4kytwGHecnCoTtXFb7DB6CNHp4rg5k1bhg0nWdLGLnOV71lmDzGQaLMy8iPLY0pw==",
- "dev": true,
- "engines": {
- "node": ">=10.0.0"
- }
- },
- "node_modules/@jridgewell/gen-mapping": {
- "version": "0.1.0",
- "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.1.0.tgz",
- "integrity": "sha512-YH+BnkvuCiPR+MUOY6JIArdTIGrRtsxnLaIxPRy4CpGJ/V6OO6Gq/1J+FJEc4j5e5h6Bcy3/K7prlMrm93BJoA==",
- "dev": true,
- "dependencies": {
- "@jridgewell/set-array": "1.0.0",
- "@jridgewell/sourcemap-codec": "^1.4.10"
- },
- "engines": {
- "node": ">=6.0.0"
- }
- },
- "node_modules/@jridgewell/resolve-uri": {
- "version": "3.0.6",
- "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.0.6.tgz",
- "integrity": "sha512-R7xHtBSNm+9SyvpJkdQl+qrM3Hm2fea3Ef197M3mUug+v+yR+Rhfbs7PBtcBUVnIWJ4JcAdjvij+c8hXS9p5aw==",
- "dev": true,
- "engines": {
- "node": ">=6.0.0"
- }
- },
- "node_modules/@jridgewell/set-array": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.0.0.tgz",
- "integrity": "sha512-LcqVnHCjOAj8BTCtjpwYZCMTn4yArusbdObCVRUYvBHhrR5fVLVyENG+UVWM4T4H/ufv7NiBLdprllxWs/5PaQ==",
- "deprecated": "incorrect UMD name",
- "dev": true,
- "engines": {
- "node": ">=6.0.0"
- }
- },
- "node_modules/@jridgewell/sourcemap-codec": {
- "version": "1.4.11",
- "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.11.tgz",
- "integrity": "sha512-Fg32GrJo61m+VqYSdRSjRXMjQ06j8YIYfcTqndLYVAaHmroZHLJZCydsWBOTDqXS2v+mjxohBWEMfg97GXmYQg==",
- "dev": true
- },
- "node_modules/@jridgewell/trace-mapping": {
- "version": "0.3.9",
- "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz",
- "integrity": "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==",
- "dev": true,
- "dependencies": {
- "@jridgewell/resolve-uri": "^3.0.3",
- "@jridgewell/sourcemap-codec": "^1.4.10"
- }
- },
- "node_modules/@rollup/plugin-node-resolve": {
- "version": "13.2.1",
- "resolved": "https://registry.npmjs.org/@rollup/plugin-node-resolve/-/plugin-node-resolve-13.2.1.tgz",
- "integrity": "sha512-btX7kzGvp1JwShQI9V6IM841YKNPYjKCvUbNrQ2EcVYbULtUd/GH6wZ/qdqH13j9pOHBER+EZXNN2L8RSJhVRA==",
- "dev": true,
- "dependencies": {
- "@rollup/pluginutils": "^3.1.0",
- "@types/resolve": "1.17.1",
- "builtin-modules": "^3.1.0",
- "deepmerge": "^4.2.2",
- "is-module": "^1.0.0",
- "resolve": "^1.19.0"
- },
- "engines": {
- "node": ">= 10.0.0"
- },
- "peerDependencies": {
- "rollup": "^2.42.0"
- }
- },
- "node_modules/@rollup/plugin-node-resolve/node_modules/@types/resolve": {
- "version": "1.17.1",
- "resolved": "https://registry.npmjs.org/@types/resolve/-/resolve-1.17.1.tgz",
- "integrity": "sha512-yy7HuzQhj0dhGpD8RLXSZWEkLsV9ibvxvi6EiJ3bkqLAO1RGo0WbkWQiwpRlSFymTJRz0d3k5LM3kkx8ArDbLw==",
- "dev": true,
- "dependencies": {
- "@types/node": "*"
- }
- },
- "node_modules/@rollup/pluginutils": {
- "version": "3.1.0",
- "resolved": "https://registry.npmjs.org/@rollup/pluginutils/-/pluginutils-3.1.0.tgz",
- "integrity": "sha512-GksZ6pr6TpIjHm8h9lSQ8pi8BE9VeubNT0OMJ3B5uZJ8pz73NPiqOtCog/x2/QzM1ENChPKxMDhiQuRHsqc+lg==",
- "dev": true,
- "dependencies": {
- "@types/estree": "0.0.39",
- "estree-walker": "^1.0.1",
- "picomatch": "^2.2.2"
- },
- "engines": {
- "node": ">= 8.0.0"
- },
- "peerDependencies": {
- "rollup": "^1.20.0||^2.0.0"
- }
- },
- "node_modules/@rollup/pluginutils/node_modules/@types/estree": {
- "version": "0.0.39",
- "resolved": "https://registry.npmjs.org/@types/estree/-/estree-0.0.39.tgz",
- "integrity": "sha512-EYNwp3bU+98cpU4lAWYYL7Zz+2gryWH1qbdDTidVd6hkiR6weksdbMadyXKXNPEkQFhXM+hVO9ZygomHXp+AIw==",
- "dev": true
- },
- "node_modules/@rollup/pluginutils/node_modules/estree-walker": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-1.0.1.tgz",
- "integrity": "sha512-1fMXF3YP4pZZVozF8j/ZLfvnR8NSIljt56UhbZ5PeeDmmGHpgpdwQt7ITlGvYaQukCvuBRMLEiKiYC+oeIg4cg==",
- "dev": true
- },
- "node_modules/@stomp/stompjs": {
- "version": "6.1.2",
- "resolved": "https://registry.npmjs.org/@stomp/stompjs/-/stompjs-6.1.2.tgz",
- "integrity": "sha512-FHDTrIFM5Ospi4L3Xhj6v2+NzCVAeNDcBe95YjUWhWiRMrBF6uN3I7AUOlRgT6jU/2WQvvYK8ZaIxFfxFp+uHQ=="
- },
- "node_modules/@types/eslint": {
- "version": "8.4.1",
- "resolved": "https://registry.npmjs.org/@types/eslint/-/eslint-8.4.1.tgz",
- "integrity": "sha512-GE44+DNEyxxh2Kc6ro/VkIj+9ma0pO0bwv9+uHSyBrikYOHr8zYcdPvnBOp1aw8s+CjRvuSx7CyWqRrNFQ59mA==",
- "dev": true,
- "dependencies": {
- "@types/estree": "*",
- "@types/json-schema": "*"
- }
- },
- "node_modules/@types/eslint-scope": {
- "version": "3.7.3",
- "resolved": "https://registry.npmjs.org/@types/eslint-scope/-/eslint-scope-3.7.3.tgz",
- "integrity": "sha512-PB3ldyrcnAicT35TWPs5IcwKD8S333HMaa2VVv4+wdvebJkjWuW/xESoB8IwRcog8HYVYamb1g/R31Qv5Bx03g==",
- "dev": true,
- "dependencies": {
- "@types/eslint": "*",
- "@types/estree": "*"
- }
- },
- "node_modules/@types/estree": {
- "version": "0.0.51",
- "resolved": "https://registry.npmjs.org/@types/estree/-/estree-0.0.51.tgz",
- "integrity": "sha512-CuPgU6f3eT/XgKKPqKd/gLZV1Xmvf1a2R5POBOGQa6uv82xpls89HU5zKeVoyR8XzHd1RGNOlQlvUe3CFkjWNQ==",
- "dev": true
- },
- "node_modules/@types/json-schema": {
- "version": "7.0.11",
- "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.11.tgz",
- "integrity": "sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ==",
- "dev": true
- },
- "node_modules/@types/node": {
- "version": "17.0.29",
- "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.29.tgz",
- "integrity": "sha512-tx5jMmMFwx7wBwq/V7OohKDVb/JwJU5qCVkeLMh1//xycAJ/ESuw9aJ9SEtlCZDYi2pBfe4JkisSoAtbOsBNAA==",
- "dev": true
- },
- "node_modules/@types/resolve": {
- "version": "0.0.8",
- "resolved": "https://registry.npmjs.org/@types/resolve/-/resolve-0.0.8.tgz",
- "integrity": "sha512-auApPaJf3NPfe18hSoJkp8EbZzer2ISk7o8mCC3M9he/a04+gbMF97NkpD2S8riMGvm4BMRI59/SZQSaLTKpsQ==",
- "dev": true,
- "dependencies": {
- "@types/node": "*"
- }
- },
- "node_modules/@webassemblyjs/ast": {
- "version": "1.11.1",
- "resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.11.1.tgz",
- "integrity": "sha512-ukBh14qFLjxTQNTXocdyksN5QdM28S1CxHt2rdskFyL+xFV7VremuBLVbmCePj+URalXBENx/9Lm7lnhihtCSw==",
- "dev": true,
- "dependencies": {
- "@webassemblyjs/helper-numbers": "1.11.1",
- "@webassemblyjs/helper-wasm-bytecode": "1.11.1"
- }
- },
- "node_modules/@webassemblyjs/floating-point-hex-parser": {
- "version": "1.11.1",
- "resolved": "https://registry.npmjs.org/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.11.1.tgz",
- "integrity": "sha512-iGRfyc5Bq+NnNuX8b5hwBrRjzf0ocrJPI6GWFodBFzmFnyvrQ83SHKhmilCU/8Jv67i4GJZBMhEzltxzcNagtQ==",
- "dev": true
- },
- "node_modules/@webassemblyjs/helper-api-error": {
- "version": "1.11.1",
- "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.1.tgz",
- "integrity": "sha512-RlhS8CBCXfRUR/cwo2ho9bkheSXG0+NwooXcc3PAILALf2QLdFyj7KGsKRbVc95hZnhnERon4kW/D3SZpp6Tcg==",
- "dev": true
- },
- "node_modules/@webassemblyjs/helper-buffer": {
- "version": "1.11.1",
- "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-buffer/-/helper-buffer-1.11.1.tgz",
- "integrity": "sha512-gwikF65aDNeeXa8JxXa2BAk+REjSyhrNC9ZwdT0f8jc4dQQeDQ7G4m0f2QCLPJiMTTO6wfDmRmj/pW0PsUvIcA==",
- "dev": true
- },
- "node_modules/@webassemblyjs/helper-numbers": {
- "version": "1.11.1",
- "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-numbers/-/helper-numbers-1.11.1.tgz",
- "integrity": "sha512-vDkbxiB8zfnPdNK9Rajcey5C0w+QJugEglN0of+kmO8l7lDb77AnlKYQF7aarZuCrv+l0UvqL+68gSDr3k9LPQ==",
- "dev": true,
- "dependencies": {
- "@webassemblyjs/floating-point-hex-parser": "1.11.1",
- "@webassemblyjs/helper-api-error": "1.11.1",
- "@xtuc/long": "4.2.2"
- }
- },
- "node_modules/@webassemblyjs/helper-wasm-bytecode": {
- "version": "1.11.1",
- "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.1.tgz",
- "integrity": "sha512-PvpoOGiJwXeTrSf/qfudJhwlvDQxFgelbMqtq52WWiXC6Xgg1IREdngmPN3bs4RoO83PnL/nFrxucXj1+BX62Q==",
- "dev": true
- },
- "node_modules/@webassemblyjs/helper-wasm-section": {
- "version": "1.11.1",
- "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.11.1.tgz",
- "integrity": "sha512-10P9No29rYX1j7F3EVPX3JvGPQPae+AomuSTPiF9eBQeChHI6iqjMIwR9JmOJXwpnn/oVGDk7I5IlskuMwU/pg==",
- "dev": true,
- "dependencies": {
- "@webassemblyjs/ast": "1.11.1",
- "@webassemblyjs/helper-buffer": "1.11.1",
- "@webassemblyjs/helper-wasm-bytecode": "1.11.1",
- "@webassemblyjs/wasm-gen": "1.11.1"
- }
- },
- "node_modules/@webassemblyjs/ieee754": {
- "version": "1.11.1",
- "resolved": "https://registry.npmjs.org/@webassemblyjs/ieee754/-/ieee754-1.11.1.tgz",
- "integrity": "sha512-hJ87QIPtAMKbFq6CGTkZYJivEwZDbQUgYd3qKSadTNOhVY7p+gfP6Sr0lLRVTaG1JjFj+r3YchoqRYxNH3M0GQ==",
- "dev": true,
- "dependencies": {
- "@xtuc/ieee754": "^1.2.0"
- }
- },
- "node_modules/@webassemblyjs/leb128": {
- "version": "1.11.1",
- "resolved": "https://registry.npmjs.org/@webassemblyjs/leb128/-/leb128-1.11.1.tgz",
- "integrity": "sha512-BJ2P0hNZ0u+Th1YZXJpzW6miwqQUGcIHT1G/sf72gLVD9DZ5AdYTqPNbHZh6K1M5VmKvFXwGSWZADz+qBWxeRw==",
- "dev": true,
- "dependencies": {
- "@xtuc/long": "4.2.2"
- }
- },
- "node_modules/@webassemblyjs/utf8": {
- "version": "1.11.1",
- "resolved": "https://registry.npmjs.org/@webassemblyjs/utf8/-/utf8-1.11.1.tgz",
- "integrity": "sha512-9kqcxAEdMhiwQkHpkNiorZzqpGrodQQ2IGrHHxCy+Ozng0ofyMA0lTqiLkVs1uzTRejX+/O0EOT7KxqVPuXosQ==",
- "dev": true
- },
- "node_modules/@webassemblyjs/wasm-edit": {
- "version": "1.11.1",
- "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-edit/-/wasm-edit-1.11.1.tgz",
- "integrity": "sha512-g+RsupUC1aTHfR8CDgnsVRVZFJqdkFHpsHMfJuWQzWU3tvnLC07UqHICfP+4XyL2tnr1amvl1Sdp06TnYCmVkA==",
- "dev": true,
- "dependencies": {
- "@webassemblyjs/ast": "1.11.1",
- "@webassemblyjs/helper-buffer": "1.11.1",
- "@webassemblyjs/helper-wasm-bytecode": "1.11.1",
- "@webassemblyjs/helper-wasm-section": "1.11.1",
- "@webassemblyjs/wasm-gen": "1.11.1",
- "@webassemblyjs/wasm-opt": "1.11.1",
- "@webassemblyjs/wasm-parser": "1.11.1",
- "@webassemblyjs/wast-printer": "1.11.1"
- }
- },
- "node_modules/@webassemblyjs/wasm-gen": {
- "version": "1.11.1",
- "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-gen/-/wasm-gen-1.11.1.tgz",
- "integrity": "sha512-F7QqKXwwNlMmsulj6+O7r4mmtAlCWfO/0HdgOxSklZfQcDu0TpLiD1mRt/zF25Bk59FIjEuGAIyn5ei4yMfLhA==",
- "dev": true,
- "dependencies": {
- "@webassemblyjs/ast": "1.11.1",
- "@webassemblyjs/helper-wasm-bytecode": "1.11.1",
- "@webassemblyjs/ieee754": "1.11.1",
- "@webassemblyjs/leb128": "1.11.1",
- "@webassemblyjs/utf8": "1.11.1"
- }
- },
- "node_modules/@webassemblyjs/wasm-opt": {
- "version": "1.11.1",
- "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-opt/-/wasm-opt-1.11.1.tgz",
- "integrity": "sha512-VqnkNqnZlU5EB64pp1l7hdm3hmQw7Vgqa0KF/KCNO9sIpI6Fk6brDEiX+iCOYrvMuBWDws0NkTOxYEb85XQHHw==",
- "dev": true,
- "dependencies": {
- "@webassemblyjs/ast": "1.11.1",
- "@webassemblyjs/helper-buffer": "1.11.1",
- "@webassemblyjs/wasm-gen": "1.11.1",
- "@webassemblyjs/wasm-parser": "1.11.1"
- }
- },
- "node_modules/@webassemblyjs/wasm-parser": {
- "version": "1.11.1",
- "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-parser/-/wasm-parser-1.11.1.tgz",
- "integrity": "sha512-rrBujw+dJu32gYB7/Lup6UhdkPx9S9SnobZzRVL7VcBH9Bt9bCBLEuX/YXOOtBsOZ4NQrRykKhffRWHvigQvOA==",
- "dev": true,
- "dependencies": {
- "@webassemblyjs/ast": "1.11.1",
- "@webassemblyjs/helper-api-error": "1.11.1",
- "@webassemblyjs/helper-wasm-bytecode": "1.11.1",
- "@webassemblyjs/ieee754": "1.11.1",
- "@webassemblyjs/leb128": "1.11.1",
- "@webassemblyjs/utf8": "1.11.1"
- }
- },
- "node_modules/@webassemblyjs/wast-printer": {
- "version": "1.11.1",
- "resolved": "https://registry.npmjs.org/@webassemblyjs/wast-printer/-/wast-printer-1.11.1.tgz",
- "integrity": "sha512-IQboUWM4eKzWW+N/jij2sRatKMh99QEelo3Eb2q0qXkvPRISAj8Qxtmw5itwqK+TTkBuUIE45AxYPToqPtL5gg==",
- "dev": true,
- "dependencies": {
- "@webassemblyjs/ast": "1.11.1",
- "@xtuc/long": "4.2.2"
- }
- },
- "node_modules/@webpack-cli/configtest": {
- "version": "1.2.0",
- "resolved": "https://registry.npmjs.org/@webpack-cli/configtest/-/configtest-1.2.0.tgz",
- "integrity": "sha512-4FB8Tj6xyVkyqjj1OaTqCjXYULB9FMkqQ8yGrZjRDrYh0nOE+7Lhs45WioWQQMV+ceFlE368Ukhe6xdvJM9Egg==",
- "dev": true,
- "peerDependencies": {
- "webpack": "4.x.x || 5.x.x",
- "webpack-cli": "4.x.x"
- }
- },
- "node_modules/@webpack-cli/info": {
- "version": "1.5.0",
- "resolved": "https://registry.npmjs.org/@webpack-cli/info/-/info-1.5.0.tgz",
- "integrity": "sha512-e8tSXZpw2hPl2uMJY6fsMswaok5FdlGNRTktvFk2sD8RjH0hE2+XistawJx1vmKteh4NmGmNUrp+Tb2w+udPcQ==",
- "dev": true,
- "dependencies": {
- "envinfo": "^7.7.3"
- },
- "peerDependencies": {
- "webpack-cli": "4.x.x"
- }
- },
- "node_modules/@webpack-cli/serve": {
- "version": "1.7.0",
- "resolved": "https://registry.npmjs.org/@webpack-cli/serve/-/serve-1.7.0.tgz",
- "integrity": "sha512-oxnCNGj88fL+xzV+dacXs44HcDwf1ovs3AuEzvP7mqXw7fQntqIhQ1BRmynh4qEKQSSSRSWVyXRjmTbZIX9V2Q==",
- "dev": true,
- "peerDependencies": {
- "webpack-cli": "4.x.x"
- },
- "peerDependenciesMeta": {
- "webpack-dev-server": {
- "optional": true
- }
- }
- },
- "node_modules/@xtuc/ieee754": {
- "version": "1.2.0",
- "resolved": "https://registry.npmjs.org/@xtuc/ieee754/-/ieee754-1.2.0.tgz",
- "integrity": "sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA==",
- "dev": true
- },
- "node_modules/@xtuc/long": {
- "version": "4.2.2",
- "resolved": "https://registry.npmjs.org/@xtuc/long/-/long-4.2.2.tgz",
- "integrity": "sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ==",
- "dev": true
- },
- "node_modules/acorn": {
- "version": "8.7.1",
- "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.7.1.tgz",
- "integrity": "sha512-Xx54uLJQZ19lKygFXOWsscKUbsBZW0CPykPhVQdhIeIwrbPmJzqeASDInc8nKBnp/JT6igTs82qPXz069H8I/A==",
- "dev": true,
- "bin": {
- "acorn": "bin/acorn"
- },
- "engines": {
- "node": ">=0.4.0"
- }
- },
- "node_modules/acorn-import-assertions": {
- "version": "1.8.0",
- "resolved": "https://registry.npmjs.org/acorn-import-assertions/-/acorn-import-assertions-1.8.0.tgz",
- "integrity": "sha512-m7VZ3jwz4eK6A4Vtt8Ew1/mNbP24u0FhdyfA7fSvnJR6LMdfOYnmuIrrJAgrYfYJ10F/otaHTtrtrtmHdMNzEw==",
- "dev": true,
- "peerDependencies": {
- "acorn": "^8"
- }
- },
- "node_modules/ajv": {
- "version": "6.12.6",
- "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz",
- "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==",
- "dev": true,
- "dependencies": {
- "fast-deep-equal": "^3.1.1",
- "fast-json-stable-stringify": "^2.0.0",
- "json-schema-traverse": "^0.4.1",
- "uri-js": "^4.2.2"
- },
- "funding": {
- "type": "github",
- "url": "https://github.com/sponsors/epoberezkin"
- }
- },
- "node_modules/ajv-keywords": {
- "version": "3.5.2",
- "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz",
- "integrity": "sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==",
- "dev": true,
- "peerDependencies": {
- "ajv": "^6.9.1"
- }
- },
- "node_modules/ansi-styles": {
- "version": "3.2.1",
- "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz",
- "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==",
- "dev": true,
- "dependencies": {
- "color-convert": "^1.9.0"
- },
- "engines": {
- "node": ">=4"
- }
- },
- "node_modules/asn1.js": {
- "version": "5.4.1",
- "resolved": "https://registry.npmjs.org/asn1.js/-/asn1.js-5.4.1.tgz",
- "integrity": "sha512-+I//4cYPccV8LdmBLiX8CYvf9Sp3vQsrqu2QNXRcrbiWvcx/UdlFiqUJJzxRQxgsZmvhXhn4cSKeSmoFjVdupA==",
- "dependencies": {
- "bn.js": "^4.0.0",
- "inherits": "^2.0.1",
- "minimalistic-assert": "^1.0.0",
- "safer-buffer": "^2.1.0"
- }
- },
- "node_modules/asn1.js/node_modules/bn.js": {
- "version": "4.12.0",
- "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz",
- "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA=="
- },
- "node_modules/babel-plugin-dynamic-import-node": {
- "version": "2.3.3",
- "resolved": "https://registry.npmjs.org/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.3.tgz",
- "integrity": "sha512-jZVI+s9Zg3IqA/kdi0i6UDCybUI3aSBLnglhYbSSjKlV7yF1F/5LWv8MakQmvYpnbJDS6fcBL2KzHSxNCMtWSQ==",
- "dev": true,
- "dependencies": {
- "object.assign": "^4.1.0"
- }
- },
- "node_modules/babel-plugin-polyfill-corejs2": {
- "version": "0.3.1",
- "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.3.1.tgz",
- "integrity": "sha512-v7/T6EQcNfVLfcN2X8Lulb7DjprieyLWJK/zOWH5DUYcAgex9sP3h25Q+DLsX9TloXe3y1O8l2q2Jv9q8UVB9w==",
- "dev": true,
- "dependencies": {
- "@babel/compat-data": "^7.13.11",
- "@babel/helper-define-polyfill-provider": "^0.3.1",
- "semver": "^6.1.1"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/babel-plugin-polyfill-corejs3": {
- "version": "0.5.2",
- "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.5.2.tgz",
- "integrity": "sha512-G3uJih0XWiID451fpeFaYGVuxHEjzKTHtc9uGFEjR6hHrvNzeS/PX+LLLcetJcytsB5m4j+K3o/EpXJNb/5IEQ==",
- "dev": true,
- "dependencies": {
- "@babel/helper-define-polyfill-provider": "^0.3.1",
- "core-js-compat": "^3.21.0"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/babel-plugin-polyfill-regenerator": {
- "version": "0.3.1",
- "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.3.1.tgz",
- "integrity": "sha512-Y2B06tvgHYt1x0yz17jGkGeeMr5FeKUu+ASJ+N6nB5lQ8Dapfg42i0OVrf8PNGJ3zKL4A23snMi1IRwrqqND7A==",
- "dev": true,
- "dependencies": {
- "@babel/helper-define-polyfill-provider": "^0.3.1"
- },
- "peerDependencies": {
- "@babel/core": "^7.0.0-0"
- }
- },
- "node_modules/bn.js": {
- "version": "5.2.1",
- "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-5.2.1.tgz",
- "integrity": "sha512-eXRvHzWyYPBuB4NBy0cmYQjGitUrtqwbvlzP3G6VFnNRbsZQIxQ10PbKKHt8gZ/HW/D/747aDl+QkDqg3KQLMQ=="
- },
- "node_modules/brorand": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/brorand/-/brorand-1.1.0.tgz",
- "integrity": "sha512-cKV8tMCEpQs4hK/ik71d6LrPOnpkpGBR0wzxqr68g2m/LB2GxVYQroAjMJZRVM1Y4BCjCKc3vAamxSzOY2RP+w=="
- },
- "node_modules/browserify-aes": {
- "version": "1.2.0",
- "resolved": "https://registry.npmjs.org/browserify-aes/-/browserify-aes-1.2.0.tgz",
- "integrity": "sha512-+7CHXqGuspUn/Sl5aO7Ea0xWGAtETPXNSAjHo48JfLdPWcMng33Xe4znFvQweqc/uzk5zSOI3H52CYnjCfb5hA==",
- "dependencies": {
- "buffer-xor": "^1.0.3",
- "cipher-base": "^1.0.0",
- "create-hash": "^1.1.0",
- "evp_bytestokey": "^1.0.3",
- "inherits": "^2.0.1",
- "safe-buffer": "^5.0.1"
- }
- },
- "node_modules/browserify-cipher": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/browserify-cipher/-/browserify-cipher-1.0.1.tgz",
- "integrity": "sha512-sPhkz0ARKbf4rRQt2hTpAHqn47X3llLkUGn+xEJzLjwY8LRs2p0v7ljvI5EyoRO/mexrNunNECisZs+gw2zz1w==",
- "dependencies": {
- "browserify-aes": "^1.0.4",
- "browserify-des": "^1.0.0",
- "evp_bytestokey": "^1.0.0"
- }
- },
- "node_modules/browserify-des": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/browserify-des/-/browserify-des-1.0.2.tgz",
- "integrity": "sha512-BioO1xf3hFwz4kc6iBhI3ieDFompMhrMlnDFC4/0/vd5MokpuAc3R+LYbwTA9A5Yc9pq9UYPqffKpW2ObuwX5A==",
- "dependencies": {
- "cipher-base": "^1.0.1",
- "des.js": "^1.0.0",
- "inherits": "^2.0.1",
- "safe-buffer": "^5.1.2"
- }
- },
- "node_modules/browserify-rsa": {
- "version": "4.1.0",
- "resolved": "https://registry.npmjs.org/browserify-rsa/-/browserify-rsa-4.1.0.tgz",
- "integrity": "sha512-AdEER0Hkspgno2aR97SAf6vi0y0k8NuOpGnVH3O99rcA5Q6sh8QxcngtHuJ6uXwnfAXNM4Gn1Gb7/MV1+Ymbog==",
- "dependencies": {
- "bn.js": "^5.0.0",
- "randombytes": "^2.0.1"
- }
- },
- "node_modules/browserify-sign": {
- "version": "4.2.1",
- "resolved": "https://registry.npmjs.org/browserify-sign/-/browserify-sign-4.2.1.tgz",
- "integrity": "sha512-/vrA5fguVAKKAVTNJjgSm1tRQDHUU6DbwO9IROu/0WAzC8PKhucDSh18J0RMvVeHAn5puMd+QHC2erPRNf8lmg==",
- "dependencies": {
- "bn.js": "^5.1.1",
- "browserify-rsa": "^4.0.1",
- "create-hash": "^1.2.0",
- "create-hmac": "^1.1.7",
- "elliptic": "^6.5.3",
- "inherits": "^2.0.4",
- "parse-asn1": "^5.1.5",
- "readable-stream": "^3.6.0",
- "safe-buffer": "^5.2.0"
- }
- },
- "node_modules/browserify-sign/node_modules/safe-buffer": {
- "version": "5.2.1",
- "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
- "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==",
- "funding": [
- {
- "type": "github",
- "url": "https://github.com/sponsors/feross"
- },
- {
- "type": "patreon",
- "url": "https://www.patreon.com/feross"
- },
- {
- "type": "consulting",
- "url": "https://feross.org/support"
- }
- ]
- },
- "node_modules/browserslist": {
- "version": "4.20.3",
- "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.20.3.tgz",
- "integrity": "sha512-NBhymBQl1zM0Y5dQT/O+xiLP9/rzOIQdKM/eMJBAq7yBgaB6krIYLGejrwVYnSHZdqjscB1SPuAjHwxjvN6Wdg==",
- "dev": true,
- "funding": [
- {
- "type": "opencollective",
- "url": "https://opencollective.com/browserslist"
- },
- {
- "type": "tidelift",
- "url": "https://tidelift.com/funding/github/npm/browserslist"
- }
- ],
- "dependencies": {
- "caniuse-lite": "^1.0.30001332",
- "electron-to-chromium": "^1.4.118",
- "escalade": "^3.1.1",
- "node-releases": "^2.0.3",
- "picocolors": "^1.0.0"
- },
- "bin": {
- "browserslist": "cli.js"
- },
- "engines": {
- "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7"
- }
- },
- "node_modules/buffer-from": {
- "version": "1.1.2",
- "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz",
- "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==",
- "dev": true
- },
- "node_modules/buffer-xor": {
- "version": "1.0.3",
- "resolved": "https://registry.npmjs.org/buffer-xor/-/buffer-xor-1.0.3.tgz",
- "integrity": "sha512-571s0T7nZWK6vB67HI5dyUF7wXiNcfaPPPTl6zYCNApANjIvYJTg7hlud/+cJpdAhS7dVzqMLmfhfHR3rAcOjQ=="
- },
- "node_modules/bufferutil": {
- "version": "4.0.6",
- "resolved": "https://registry.npmjs.org/bufferutil/-/bufferutil-4.0.6.tgz",
- "integrity": "sha512-jduaYOYtnio4aIAyc6UbvPCVcgq7nYpVnucyxr6eCYg/Woad9Hf/oxxBRDnGGjPfjUm6j5O/uBWhIu4iLebFaw==",
- "hasInstallScript": true,
- "dependencies": {
- "node-gyp-build": "^4.3.0"
- },
- "engines": {
- "node": ">=6.14.2"
- }
- },
- "node_modules/builtin-modules": {
- "version": "3.2.0",
- "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.2.0.tgz",
- "integrity": "sha512-lGzLKcioL90C7wMczpkY0n/oART3MbBa8R9OFGE1rJxoVI86u4WAGfEk8Wjv10eKSyTHVGkSo3bvBylCEtk7LA==",
- "dev": true,
- "engines": {
- "node": ">=6"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/call-bind": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz",
- "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==",
- "dev": true,
- "dependencies": {
- "function-bind": "^1.1.1",
- "get-intrinsic": "^1.0.2"
- },
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/caniuse-lite": {
- "version": "1.0.30001332",
- "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001332.tgz",
- "integrity": "sha512-10T30NYOEQtN6C11YGg411yebhvpnC6Z102+B95eAsN0oB6KUs01ivE8u+G6FMIRtIrVlYXhL+LUwQ3/hXwDWw==",
- "dev": true,
- "funding": [
- {
- "type": "opencollective",
- "url": "https://opencollective.com/browserslist"
- },
- {
- "type": "tidelift",
- "url": "https://tidelift.com/funding/github/npm/caniuse-lite"
- }
- ]
- },
- "node_modules/chalk": {
- "version": "2.4.2",
- "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz",
- "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==",
- "dev": true,
- "dependencies": {
- "ansi-styles": "^3.2.1",
- "escape-string-regexp": "^1.0.5",
- "supports-color": "^5.3.0"
- },
- "engines": {
- "node": ">=4"
- }
- },
- "node_modules/chrome-trace-event": {
- "version": "1.0.3",
- "resolved": "https://registry.npmjs.org/chrome-trace-event/-/chrome-trace-event-1.0.3.tgz",
- "integrity": "sha512-p3KULyQg4S7NIHixdwbGX+nFHkoBiA4YQmyWtjb8XngSKV124nJmRysgAeujbUVb15vh+RvFUfCPqU7rXk+hZg==",
- "dev": true,
- "engines": {
- "node": ">=6.0"
- }
- },
- "node_modules/cipher-base": {
- "version": "1.0.4",
- "resolved": "https://registry.npmjs.org/cipher-base/-/cipher-base-1.0.4.tgz",
- "integrity": "sha512-Kkht5ye6ZGmwv40uUDZztayT2ThLQGfnj/T71N/XzeZeo3nf8foyW7zGTsPYkEya3m5f3cAypH+qe7YOrM1U2Q==",
- "dependencies": {
- "inherits": "^2.0.1",
- "safe-buffer": "^5.0.1"
- }
- },
- "node_modules/clone-deep": {
- "version": "4.0.1",
- "resolved": "https://registry.npmjs.org/clone-deep/-/clone-deep-4.0.1.tgz",
- "integrity": "sha512-neHB9xuzh/wk0dIHweyAXv2aPGZIVk3pLMe+/RNzINf17fe0OG96QroktYAUm7SM1PBnzTabaLboqqxDyMU+SQ==",
- "dev": true,
- "dependencies": {
- "is-plain-object": "^2.0.4",
- "kind-of": "^6.0.2",
- "shallow-clone": "^3.0.0"
- },
- "engines": {
- "node": ">=6"
- }
- },
- "node_modules/color-convert": {
- "version": "1.9.3",
- "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz",
- "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==",
- "dev": true,
- "dependencies": {
- "color-name": "1.1.3"
- }
- },
- "node_modules/color-name": {
- "version": "1.1.3",
- "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz",
- "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=",
- "dev": true
- },
- "node_modules/colorette": {
- "version": "2.0.16",
- "resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.16.tgz",
- "integrity": "sha512-hUewv7oMjCp+wkBv5Rm0v87eJhq4woh5rSR+42YSQJKecCqgIqNkZ6lAlQms/BwHPJA5NKMRlpxPRv0n8HQW6g==",
- "dev": true
- },
- "node_modules/convert-source-map": {
- "version": "1.8.0",
- "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.8.0.tgz",
- "integrity": "sha512-+OQdjP49zViI/6i7nIJpA8rAl4sV/JdPfU9nZs3VqOwGIgizICvuN2ru6fMd+4llL0tar18UYJXfZ/TWtmhUjA==",
- "dev": true,
- "dependencies": {
- "safe-buffer": "~5.1.1"
- }
- },
- "node_modules/core-js-compat": {
- "version": "3.22.2",
- "resolved": "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.22.2.tgz",
- "integrity": "sha512-Fns9lU06ZJ07pdfmPMu7OnkIKGPKDzXKIiuGlSvHHapwqMUF2QnnsWwtueFZtSyZEilP0o6iUeHQwpn7LxtLUw==",
- "dev": true,
- "dependencies": {
- "browserslist": "^4.20.2",
- "semver": "7.0.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/core-js"
- }
- },
- "node_modules/core-js-compat/node_modules/semver": {
- "version": "7.0.0",
- "resolved": "https://registry.npmjs.org/semver/-/semver-7.0.0.tgz",
- "integrity": "sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A==",
- "dev": true,
- "bin": {
- "semver": "bin/semver.js"
- }
- },
- "node_modules/create-ecdh": {
- "version": "4.0.4",
- "resolved": "https://registry.npmjs.org/create-ecdh/-/create-ecdh-4.0.4.tgz",
- "integrity": "sha512-mf+TCx8wWc9VpuxfP2ht0iSISLZnt0JgWlrOKZiNqyUZWnjIaCIVNQArMHnCZKfEYRg6IM7A+NeJoN8gf/Ws0A==",
- "dependencies": {
- "bn.js": "^4.1.0",
- "elliptic": "^6.5.3"
- }
- },
- "node_modules/create-ecdh/node_modules/bn.js": {
- "version": "4.12.0",
- "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz",
- "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA=="
- },
- "node_modules/create-hash": {
- "version": "1.2.0",
- "resolved": "https://registry.npmjs.org/create-hash/-/create-hash-1.2.0.tgz",
- "integrity": "sha512-z00bCGNHDG8mHAkP7CtT1qVu+bFQUPjYq/4Iv3C3kWjTFV10zIjfSoeqXo9Asws8gwSHDGj/hl2u4OGIjapeCg==",
- "dependencies": {
- "cipher-base": "^1.0.1",
- "inherits": "^2.0.1",
- "md5.js": "^1.3.4",
- "ripemd160": "^2.0.1",
- "sha.js": "^2.4.0"
- }
- },
- "node_modules/create-hmac": {
- "version": "1.1.7",
- "resolved": "https://registry.npmjs.org/create-hmac/-/create-hmac-1.1.7.tgz",
- "integrity": "sha512-MJG9liiZ+ogc4TzUwuvbER1JRdgvUFSB5+VR/g5h82fGaIRWMWddtKBHi7/sVhfjQZ6SehlyhvQYrcYkaUIpLg==",
- "dependencies": {
- "cipher-base": "^1.0.3",
- "create-hash": "^1.1.0",
- "inherits": "^2.0.1",
- "ripemd160": "^2.0.0",
- "safe-buffer": "^5.0.1",
- "sha.js": "^2.4.8"
- }
- },
- "node_modules/cross-spawn": {
- "version": "7.0.3",
- "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz",
- "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==",
- "dev": true,
- "dependencies": {
- "path-key": "^3.1.0",
- "shebang-command": "^2.0.0",
- "which": "^2.0.1"
- },
- "engines": {
- "node": ">= 8"
- }
- },
- "node_modules/crypto-browserify": {
- "version": "3.12.0",
- "resolved": "https://registry.npmjs.org/crypto-browserify/-/crypto-browserify-3.12.0.tgz",
- "integrity": "sha512-fz4spIh+znjO2VjL+IdhEpRJ3YN6sMzITSBijk6FK2UvTqruSQW+/cCZTSNsMiZNvUeq0CqurF+dAbyiGOY6Wg==",
- "dependencies": {
- "browserify-cipher": "^1.0.0",
- "browserify-sign": "^4.0.0",
- "create-ecdh": "^4.0.0",
- "create-hash": "^1.1.0",
- "create-hmac": "^1.1.0",
- "diffie-hellman": "^5.0.0",
- "inherits": "^2.0.1",
- "pbkdf2": "^3.0.3",
- "public-encrypt": "^4.0.0",
- "randombytes": "^2.0.0",
- "randomfill": "^1.0.3"
- },
- "engines": {
- "node": "*"
- }
- },
- "node_modules/d": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/d/-/d-1.0.1.tgz",
- "integrity": "sha512-m62ShEObQ39CfralilEQRjH6oAMtNCV1xJyEx5LpRYUVN+EviphDgUc/F3hnYbADmkiNs67Y+3ylmlG7Lnu+FA==",
- "dependencies": {
- "es5-ext": "^0.10.50",
- "type": "^1.0.1"
- }
- },
- "node_modules/debug": {
- "version": "4.3.4",
- "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz",
- "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==",
- "dev": true,
- "dependencies": {
- "ms": "2.1.2"
- },
- "engines": {
- "node": ">=6.0"
- },
- "peerDependenciesMeta": {
- "supports-color": {
- "optional": true
- }
- }
- },
- "node_modules/deepmerge": {
- "version": "4.2.2",
- "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.2.2.tgz",
- "integrity": "sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg==",
- "dev": true,
- "engines": {
- "node": ">=0.10.0"
- }
- },
- "node_modules/define-properties": {
- "version": "1.1.4",
- "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.4.tgz",
- "integrity": "sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA==",
- "dev": true,
- "dependencies": {
- "has-property-descriptors": "^1.0.0",
- "object-keys": "^1.1.1"
- },
- "engines": {
- "node": ">= 0.4"
- },
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/des.js": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/des.js/-/des.js-1.0.1.tgz",
- "integrity": "sha512-Q0I4pfFrv2VPd34/vfLrFOoRmlYj3OV50i7fskps1jZWK1kApMWWT9G6RRUeYedLcBDIhnSDaUvJMb3AhUlaEA==",
- "dependencies": {
- "inherits": "^2.0.1",
- "minimalistic-assert": "^1.0.0"
- }
- },
- "node_modules/diffie-hellman": {
- "version": "5.0.3",
- "resolved": "https://registry.npmjs.org/diffie-hellman/-/diffie-hellman-5.0.3.tgz",
- "integrity": "sha512-kqag/Nl+f3GwyK25fhUMYj81BUOrZ9IuJsjIcDE5icNM9FJHAVm3VcUDxdLPoQtTuUylWm6ZIknYJwwaPxsUzg==",
- "dependencies": {
- "bn.js": "^4.1.0",
- "miller-rabin": "^4.0.0",
- "randombytes": "^2.0.0"
- }
- },
- "node_modules/diffie-hellman/node_modules/bn.js": {
- "version": "4.12.0",
- "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz",
- "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA=="
- },
- "node_modules/electron-to-chromium": {
- "version": "1.4.123",
- "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.123.tgz",
- "integrity": "sha512-0pHGE53WkYoFbsgwYcVKEpWa6jbzlvkohIEA2CUoZ9b5KC+w/zlMiQHvW/4IBcOh7YoEFqRNavgTk02TBoUTUw==",
- "dev": true
- },
- "node_modules/elliptic": {
- "version": "6.5.4",
- "resolved": "https://registry.npmjs.org/elliptic/-/elliptic-6.5.4.tgz",
- "integrity": "sha512-iLhC6ULemrljPZb+QutR5TQGB+pdW6KGD5RSegS+8sorOZT+rdQFbsQFJgvN3eRqNALqJer4oQ16YvJHlU8hzQ==",
- "dependencies": {
- "bn.js": "^4.11.9",
- "brorand": "^1.1.0",
- "hash.js": "^1.0.0",
- "hmac-drbg": "^1.0.1",
- "inherits": "^2.0.4",
- "minimalistic-assert": "^1.0.1",
- "minimalistic-crypto-utils": "^1.0.1"
- }
- },
- "node_modules/elliptic/node_modules/bn.js": {
- "version": "4.12.0",
- "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz",
- "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA=="
- },
- "node_modules/enhanced-resolve": {
- "version": "5.9.3",
- "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.9.3.tgz",
- "integrity": "sha512-Bq9VSor+kjvW3f9/MiiR4eE3XYgOl7/rS8lnSxbRbF3kS0B2r+Y9w5krBWxZgDxASVZbdYrn5wT4j/Wb0J9qow==",
- "dev": true,
- "dependencies": {
- "graceful-fs": "^4.2.4",
- "tapable": "^2.2.0"
- },
- "engines": {
- "node": ">=10.13.0"
- }
- },
- "node_modules/envinfo": {
- "version": "7.8.1",
- "resolved": "https://registry.npmjs.org/envinfo/-/envinfo-7.8.1.tgz",
- "integrity": "sha512-/o+BXHmB7ocbHEAs6F2EnG0ogybVVUdkRunTT2glZU9XAaGmhqskrvKwqXuDfNjEO0LZKWdejEEpnq8aM0tOaw==",
- "dev": true,
- "bin": {
- "envinfo": "dist/cli.js"
- },
- "engines": {
- "node": ">=4"
- }
- },
- "node_modules/es-module-lexer": {
- "version": "0.9.3",
- "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-0.9.3.tgz",
- "integrity": "sha512-1HQ2M2sPtxwnvOvT1ZClHyQDiggdNjURWpY2we6aMKCQiUVxTmVs2UYPLIrD84sS+kMdUwfBSylbJPwNnBrnHQ==",
- "dev": true
- },
- "node_modules/es5-ext": {
- "version": "0.10.61",
- "resolved": "https://registry.npmjs.org/es5-ext/-/es5-ext-0.10.61.tgz",
- "integrity": "sha512-yFhIqQAzu2Ca2I4SE2Au3rxVfmohU9Y7wqGR+s7+H7krk26NXhIRAZDgqd6xqjCEFUomDEA3/Bo/7fKmIkW1kA==",
- "hasInstallScript": true,
- "dependencies": {
- "es6-iterator": "^2.0.3",
- "es6-symbol": "^3.1.3",
- "next-tick": "^1.1.0"
- },
- "engines": {
- "node": ">=0.10"
- }
- },
- "node_modules/es6-iterator": {
- "version": "2.0.3",
- "resolved": "https://registry.npmjs.org/es6-iterator/-/es6-iterator-2.0.3.tgz",
- "integrity": "sha1-p96IkUGgWpSwhUQDstCg+/qY87c=",
- "dependencies": {
- "d": "1",
- "es5-ext": "^0.10.35",
- "es6-symbol": "^3.1.1"
- }
- },
- "node_modules/es6-symbol": {
- "version": "3.1.3",
- "resolved": "https://registry.npmjs.org/es6-symbol/-/es6-symbol-3.1.3.tgz",
- "integrity": "sha512-NJ6Yn3FuDinBaBRWl/q5X/s4koRHBrgKAu+yGI6JCBeiu3qrcbJhwT2GeR/EXVfylRk8dpQVJoLEFhK+Mu31NA==",
- "dependencies": {
- "d": "^1.0.1",
- "ext": "^1.1.2"
- }
- },
- "node_modules/escalade": {
- "version": "3.1.1",
- "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz",
- "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==",
- "dev": true,
- "engines": {
- "node": ">=6"
- }
- },
- "node_modules/escape-string-regexp": {
- "version": "1.0.5",
- "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz",
- "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=",
- "dev": true,
- "engines": {
- "node": ">=0.8.0"
- }
- },
- "node_modules/eslint-scope": {
- "version": "5.1.1",
- "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz",
- "integrity": "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==",
- "dev": true,
- "dependencies": {
- "esrecurse": "^4.3.0",
- "estraverse": "^4.1.1"
- },
- "engines": {
- "node": ">=8.0.0"
- }
- },
- "node_modules/esrecurse": {
- "version": "4.3.0",
- "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz",
- "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==",
- "dev": true,
- "dependencies": {
- "estraverse": "^5.2.0"
- },
- "engines": {
- "node": ">=4.0"
- }
- },
- "node_modules/esrecurse/node_modules/estraverse": {
- "version": "5.3.0",
- "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz",
- "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==",
- "dev": true,
- "engines": {
- "node": ">=4.0"
- }
- },
- "node_modules/estraverse": {
- "version": "4.3.0",
- "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz",
- "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==",
- "dev": true,
- "engines": {
- "node": ">=4.0"
- }
- },
- "node_modules/estree-walker": {
- "version": "0.6.1",
- "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-0.6.1.tgz",
- "integrity": "sha512-SqmZANLWS0mnatqbSfRP5g8OXZC12Fgg1IwNtLsyHDzJizORW4khDfjPqJZsemPWBB2uqykUah5YpQ6epsqC/w==",
- "dev": true
- },
- "node_modules/esutils": {
- "version": "2.0.3",
- "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz",
- "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==",
- "dev": true,
- "engines": {
- "node": ">=0.10.0"
- }
- },
- "node_modules/events": {
- "version": "3.3.0",
- "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz",
- "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==",
- "dev": true,
- "engines": {
- "node": ">=0.8.x"
- }
- },
- "node_modules/evp_bytestokey": {
- "version": "1.0.3",
- "resolved": "https://registry.npmjs.org/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz",
- "integrity": "sha512-/f2Go4TognH/KvCISP7OUsHn85hT9nUkxxA9BEWxFn+Oj9o8ZNLm/40hdlgSLyuOimsrTKLUMEorQexp/aPQeA==",
- "dependencies": {
- "md5.js": "^1.3.4",
- "safe-buffer": "^5.1.1"
- }
- },
- "node_modules/ext": {
- "version": "1.6.0",
- "resolved": "https://registry.npmjs.org/ext/-/ext-1.6.0.tgz",
- "integrity": "sha512-sdBImtzkq2HpkdRLtlLWDa6w4DX22ijZLKx8BMPUuKe1c5lbN6xwQDQCxSfxBQnHZ13ls/FH0MQZx/q/gr6FQg==",
- "dependencies": {
- "type": "^2.5.0"
- }
- },
- "node_modules/ext/node_modules/type": {
- "version": "2.6.0",
- "resolved": "https://registry.npmjs.org/type/-/type-2.6.0.tgz",
- "integrity": "sha512-eiDBDOmkih5pMbo9OqsqPRGMljLodLcwd5XD5JbtNB0o89xZAwynY9EdCDsJU7LtcVCClu9DvM7/0Ep1hYX3EQ=="
- },
- "node_modules/fast-deep-equal": {
- "version": "3.1.3",
- "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz",
- "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==",
- "dev": true
- },
- "node_modules/fast-json-stable-stringify": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz",
- "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==",
- "dev": true
- },
- "node_modules/fastest-levenshtein": {
- "version": "1.0.12",
- "resolved": "https://registry.npmjs.org/fastest-levenshtein/-/fastest-levenshtein-1.0.12.tgz",
- "integrity": "sha512-On2N+BpYJ15xIC974QNVuYGMOlEVt4s0EOI3wwMqOmK1fdDY+FN/zltPV8vosq4ad4c/gJ1KHScUn/6AWIgiow==",
- "dev": true
- },
- "node_modules/find-up": {
- "version": "4.1.0",
- "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz",
- "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==",
- "dev": true,
- "dependencies": {
- "locate-path": "^5.0.0",
- "path-exists": "^4.0.0"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/fsevents": {
- "version": "2.3.2",
- "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz",
- "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==",
- "dev": true,
- "hasInstallScript": true,
- "optional": true,
- "os": [
- "darwin"
- ],
- "engines": {
- "node": "^8.16.0 || ^10.6.0 || >=11.0.0"
- }
- },
- "node_modules/function-bind": {
- "version": "1.1.1",
- "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz",
- "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==",
- "dev": true
- },
- "node_modules/gensync": {
- "version": "1.0.0-beta.2",
- "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz",
- "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==",
- "dev": true,
- "engines": {
- "node": ">=6.9.0"
- }
- },
- "node_modules/get-intrinsic": {
- "version": "1.1.1",
- "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.1.tgz",
- "integrity": "sha512-kWZrnVM42QCiEA2Ig1bG8zjoIMOgxWwYCEeNdwY6Tv/cOSeGpcoX4pXHfKUxNKVoArnrEr2e9srnAxxGIraS9Q==",
- "dev": true,
- "dependencies": {
- "function-bind": "^1.1.1",
- "has": "^1.0.3",
- "has-symbols": "^1.0.1"
- },
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/glob-to-regexp": {
- "version": "0.4.1",
- "resolved": "https://registry.npmjs.org/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz",
- "integrity": "sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==",
- "dev": true
- },
- "node_modules/globals": {
- "version": "11.12.0",
- "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz",
- "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==",
- "dev": true,
- "engines": {
- "node": ">=4"
- }
- },
- "node_modules/graceful-fs": {
- "version": "4.2.10",
- "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.10.tgz",
- "integrity": "sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==",
- "dev": true
- },
- "node_modules/has": {
- "version": "1.0.3",
- "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz",
- "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==",
- "dev": true,
- "dependencies": {
- "function-bind": "^1.1.1"
- },
- "engines": {
- "node": ">= 0.4.0"
- }
- },
- "node_modules/has-flag": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz",
- "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=",
- "dev": true,
- "engines": {
- "node": ">=4"
- }
- },
- "node_modules/has-property-descriptors": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz",
- "integrity": "sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ==",
- "dev": true,
- "dependencies": {
- "get-intrinsic": "^1.1.1"
- },
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/has-symbols": {
- "version": "1.0.3",
- "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz",
- "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==",
- "dev": true,
- "engines": {
- "node": ">= 0.4"
- },
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/hash-base": {
- "version": "3.1.0",
- "resolved": "https://registry.npmjs.org/hash-base/-/hash-base-3.1.0.tgz",
- "integrity": "sha512-1nmYp/rhMDiE7AYkDw+lLwlAzz0AntGIe51F3RfFfEqyQ3feY2eI/NcwC6umIQVOASPMsWJLJScWKSSvzL9IVA==",
- "dependencies": {
- "inherits": "^2.0.4",
- "readable-stream": "^3.6.0",
- "safe-buffer": "^5.2.0"
- },
- "engines": {
- "node": ">=4"
- }
- },
- "node_modules/hash-base/node_modules/safe-buffer": {
- "version": "5.2.1",
- "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
- "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==",
- "funding": [
- {
- "type": "github",
- "url": "https://github.com/sponsors/feross"
- },
- {
- "type": "patreon",
- "url": "https://www.patreon.com/feross"
- },
- {
- "type": "consulting",
- "url": "https://feross.org/support"
- }
- ]
- },
- "node_modules/hash.js": {
- "version": "1.1.7",
- "resolved": "https://registry.npmjs.org/hash.js/-/hash.js-1.1.7.tgz",
- "integrity": "sha512-taOaskGt4z4SOANNseOviYDvjEJinIkRgmp7LbKP2YTTmVxWBl87s/uzK9r+44BclBSp2X7K1hqeNfz9JbBeXA==",
- "dependencies": {
- "inherits": "^2.0.3",
- "minimalistic-assert": "^1.0.1"
- }
- },
- "node_modules/hmac-drbg": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/hmac-drbg/-/hmac-drbg-1.0.1.tgz",
- "integrity": "sha512-Tti3gMqLdZfhOQY1Mzf/AanLiqh1WTiJgEj26ZuYQ9fbkLomzGchCws4FyrSd4VkpBfiNhaE1On+lOz894jvXg==",
- "dependencies": {
- "hash.js": "^1.0.3",
- "minimalistic-assert": "^1.0.0",
- "minimalistic-crypto-utils": "^1.0.1"
- }
- },
- "node_modules/import-local": {
- "version": "3.1.0",
- "resolved": "https://registry.npmjs.org/import-local/-/import-local-3.1.0.tgz",
- "integrity": "sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg==",
- "dev": true,
- "dependencies": {
- "pkg-dir": "^4.2.0",
- "resolve-cwd": "^3.0.0"
- },
- "bin": {
- "import-local-fixture": "fixtures/cli.js"
- },
- "engines": {
- "node": ">=8"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/inherits": {
- "version": "2.0.4",
- "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
- "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="
- },
- "node_modules/interpret": {
- "version": "2.2.0",
- "resolved": "https://registry.npmjs.org/interpret/-/interpret-2.2.0.tgz",
- "integrity": "sha512-Ju0Bz/cEia55xDwUWEa8+olFpCiQoypjnQySseKtmjNrnps3P+xfpUmGr90T7yjlVJmOtybRvPXhKMbHr+fWnw==",
- "dev": true,
- "engines": {
- "node": ">= 0.10"
- }
- },
- "node_modules/is-core-module": {
- "version": "2.9.0",
- "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.9.0.tgz",
- "integrity": "sha512-+5FPy5PnwmO3lvfMb0AsoPaBG+5KHUI0wYFXOtYPnVVVspTFUuMZNfNaNVRt3FZadstu2c8x23vykRW/NBoU6A==",
- "dev": true,
- "dependencies": {
- "has": "^1.0.3"
- },
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/is-module": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/is-module/-/is-module-1.0.0.tgz",
- "integrity": "sha1-Mlj7afeMFNW4FdZkM2tM/7ZEFZE=",
- "dev": true
- },
- "node_modules/is-plain-object": {
- "version": "2.0.4",
- "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-2.0.4.tgz",
- "integrity": "sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==",
- "dev": true,
- "dependencies": {
- "isobject": "^3.0.1"
- },
- "engines": {
- "node": ">=0.10.0"
- }
- },
- "node_modules/is-reference": {
- "version": "1.2.1",
- "resolved": "https://registry.npmjs.org/is-reference/-/is-reference-1.2.1.tgz",
- "integrity": "sha512-U82MsXXiFIrjCK4otLT+o2NA2Cd2g5MLoOVXUZjIOhLurrRxpEXzI8O0KZHr3IjLvlAH1kTPYSuqer5T9ZVBKQ==",
- "dev": true,
- "dependencies": {
- "@types/estree": "*"
- }
- },
- "node_modules/is-typedarray": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz",
- "integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo="
- },
- "node_modules/isexe": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz",
- "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==",
- "dev": true
- },
- "node_modules/isobject": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz",
- "integrity": "sha1-TkMekrEalzFjaqH5yNHMvP2reN8=",
- "dev": true,
- "engines": {
- "node": ">=0.10.0"
- }
- },
- "node_modules/jest-worker": {
- "version": "27.5.1",
- "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-27.5.1.tgz",
- "integrity": "sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg==",
- "dev": true,
- "dependencies": {
- "@types/node": "*",
- "merge-stream": "^2.0.0",
- "supports-color": "^8.0.0"
- },
- "engines": {
- "node": ">= 10.13.0"
- }
- },
- "node_modules/jest-worker/node_modules/has-flag": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz",
- "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==",
- "dev": true,
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/jest-worker/node_modules/supports-color": {
- "version": "8.1.1",
- "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz",
- "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==",
- "dev": true,
- "dependencies": {
- "has-flag": "^4.0.0"
- },
- "engines": {
- "node": ">=10"
- },
- "funding": {
- "url": "https://github.com/chalk/supports-color?sponsor=1"
- }
- },
- "node_modules/js-tokens": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz",
- "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==",
- "dev": true
- },
- "node_modules/jsesc": {
- "version": "2.5.2",
- "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz",
- "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==",
- "dev": true,
- "bin": {
- "jsesc": "bin/jsesc"
- },
- "engines": {
- "node": ">=4"
- }
- },
- "node_modules/json-parse-even-better-errors": {
- "version": "2.3.1",
- "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz",
- "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==",
- "dev": true
- },
- "node_modules/json-schema-traverse": {
- "version": "0.4.1",
- "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz",
- "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==",
- "dev": true
- },
- "node_modules/json5": {
- "version": "2.2.1",
- "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.1.tgz",
- "integrity": "sha512-1hqLFMSrGHRHxav9q9gNjJ5EXznIxGVO09xQRrwplcS8qs28pZ8s8hupZAmqDwZUmVZ2Qb2jnyPOWcDH8m8dlA==",
- "dev": true,
- "bin": {
- "json5": "lib/cli.js"
- },
- "engines": {
- "node": ">=6"
- }
- },
- "node_modules/kind-of": {
- "version": "6.0.3",
- "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz",
- "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==",
- "dev": true,
- "engines": {
- "node": ">=0.10.0"
- }
- },
- "node_modules/loader-runner": {
- "version": "4.3.0",
- "resolved": "https://registry.npmjs.org/loader-runner/-/loader-runner-4.3.0.tgz",
- "integrity": "sha512-3R/1M+yS3j5ou80Me59j7F9IMs4PXs3VqRrm0TU3AbKPxlmpoY1TNscJV/oGJXo8qCatFGTfDbY6W6ipGOYXfg==",
- "dev": true,
- "engines": {
- "node": ">=6.11.5"
- }
- },
- "node_modules/locate-path": {
- "version": "5.0.0",
- "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz",
- "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==",
- "dev": true,
- "dependencies": {
- "p-locate": "^4.1.0"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/lodash": {
- "version": "4.17.21",
- "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
- "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="
- },
- "node_modules/lodash.debounce": {
- "version": "4.0.8",
- "resolved": "https://registry.npmjs.org/lodash.debounce/-/lodash.debounce-4.0.8.tgz",
- "integrity": "sha1-gteb/zCmfEAF/9XiUVMArZyk168=",
- "dev": true
- },
- "node_modules/lodash.sortby": {
- "version": "4.7.0",
- "resolved": "https://registry.npmjs.org/lodash.sortby/-/lodash.sortby-4.7.0.tgz",
- "integrity": "sha1-7dFMgk4sycHgsKG0K7UhBRakJDg=",
- "dev": true
- },
- "node_modules/magic-string": {
- "version": "0.25.9",
- "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.25.9.tgz",
- "integrity": "sha512-RmF0AsMzgt25qzqqLc1+MbHmhdx0ojF2Fvs4XnOqz2ZOBXzzkEwc/dJQZCYHAn7v1jbVOjAZfK8msRn4BxO4VQ==",
- "dev": true,
- "dependencies": {
- "sourcemap-codec": "^1.4.8"
- }
- },
- "node_modules/md5.js": {
- "version": "1.3.5",
- "resolved": "https://registry.npmjs.org/md5.js/-/md5.js-1.3.5.tgz",
- "integrity": "sha512-xitP+WxNPcTTOgnTJcrhM0xvdPepipPSf3I8EIpGKeFLjt3PlJLIDG3u8EX53ZIubkb+5U2+3rELYpEhHhzdkg==",
- "dependencies": {
- "hash-base": "^3.0.0",
- "inherits": "^2.0.1",
- "safe-buffer": "^5.1.2"
- }
- },
- "node_modules/merge-stream": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz",
- "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==",
- "dev": true
- },
- "node_modules/miller-rabin": {
- "version": "4.0.1",
- "resolved": "https://registry.npmjs.org/miller-rabin/-/miller-rabin-4.0.1.tgz",
- "integrity": "sha512-115fLhvZVqWwHPbClyntxEVfVDfl9DLLTuJvq3g2O/Oxi8AiNouAHvDSzHS0viUJc+V5vm3eq91Xwqn9dp4jRA==",
- "dependencies": {
- "bn.js": "^4.0.0",
- "brorand": "^1.0.1"
- },
- "bin": {
- "miller-rabin": "bin/miller-rabin"
- }
- },
- "node_modules/miller-rabin/node_modules/bn.js": {
- "version": "4.12.0",
- "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz",
- "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA=="
- },
- "node_modules/mime-db": {
- "version": "1.52.0",
- "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
- "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
- "dev": true,
- "engines": {
- "node": ">= 0.6"
- }
- },
- "node_modules/mime-types": {
- "version": "2.1.35",
- "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
- "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
- "dev": true,
- "dependencies": {
- "mime-db": "1.52.0"
- },
- "engines": {
- "node": ">= 0.6"
- }
- },
- "node_modules/minimalistic-assert": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz",
- "integrity": "sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A=="
- },
- "node_modules/minimalistic-crypto-utils": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/minimalistic-crypto-utils/-/minimalistic-crypto-utils-1.0.1.tgz",
- "integrity": "sha512-JIYlbt6g8i5jKfJ3xz7rF0LXmv2TkDxBLUkiBeZ7bAx4GnnNMr8xFpGnOxn6GhTEHx3SjRrZEoU+j04prX1ktg=="
- },
- "node_modules/ms": {
- "version": "2.1.2",
- "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
- "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==",
- "dev": true
- },
- "node_modules/neo-async": {
- "version": "2.6.2",
- "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz",
- "integrity": "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==",
- "dev": true
- },
- "node_modules/next-tick": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/next-tick/-/next-tick-1.1.0.tgz",
- "integrity": "sha512-CXdUiJembsNjuToQvxayPZF9Vqht7hewsvy2sOWafLvi2awflj9mOC6bHIg50orX8IJvWKY9wYQ/zB2kogPslQ=="
- },
- "node_modules/node-gyp-build": {
- "version": "4.4.0",
- "resolved": "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.4.0.tgz",
- "integrity": "sha512-amJnQCcgtRVw9SvoebO3BKGESClrfXGCUTX9hSn1OuGQTQBOZmVd0Z0OlecpuRksKvbsUqALE8jls/ErClAPuQ==",
- "bin": {
- "node-gyp-build": "bin.js",
- "node-gyp-build-optional": "optional.js",
- "node-gyp-build-test": "build-test.js"
- }
- },
- "node_modules/node-releases": {
- "version": "2.0.3",
- "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.3.tgz",
- "integrity": "sha512-maHFz6OLqYxz+VQyCAtA3PTX4UP/53pa05fyDNc9CwjvJ0yEh6+xBwKsgCxMNhS8taUKBFYxfuiaD9U/55iFaw==",
- "dev": true
- },
- "node_modules/object-keys": {
- "version": "1.1.1",
- "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz",
- "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==",
- "dev": true,
- "engines": {
- "node": ">= 0.4"
- }
- },
- "node_modules/object.assign": {
- "version": "4.1.2",
- "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.2.tgz",
- "integrity": "sha512-ixT2L5THXsApyiUPYKmW+2EHpXXe5Ii3M+f4e+aJFAHao5amFRW6J0OO6c/LU8Be47utCx2GL89hxGB6XSmKuQ==",
- "dev": true,
- "dependencies": {
- "call-bind": "^1.0.0",
- "define-properties": "^1.1.3",
- "has-symbols": "^1.0.1",
- "object-keys": "^1.1.1"
- },
- "engines": {
- "node": ">= 0.4"
- },
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/p-limit": {
- "version": "2.3.0",
- "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz",
- "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==",
- "dev": true,
- "dependencies": {
- "p-try": "^2.0.0"
- },
- "engines": {
- "node": ">=6"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/p-locate": {
- "version": "4.1.0",
- "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz",
- "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==",
- "dev": true,
- "dependencies": {
- "p-limit": "^2.2.0"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/p-try": {
- "version": "2.2.0",
- "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz",
- "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==",
- "dev": true,
- "engines": {
- "node": ">=6"
- }
- },
- "node_modules/parse-asn1": {
- "version": "5.1.6",
- "resolved": "https://registry.npmjs.org/parse-asn1/-/parse-asn1-5.1.6.tgz",
- "integrity": "sha512-RnZRo1EPU6JBnra2vGHj0yhp6ebyjBZpmUCLHWiFhxlzvBCCpAuZ7elsBp1PVAbQN0/04VD/19rfzlBSwLstMw==",
- "dependencies": {
- "asn1.js": "^5.2.0",
- "browserify-aes": "^1.0.0",
- "evp_bytestokey": "^1.0.0",
- "pbkdf2": "^3.0.3",
- "safe-buffer": "^5.1.1"
- }
- },
- "node_modules/path-exists": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz",
- "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==",
- "dev": true,
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/path-key": {
- "version": "3.1.1",
- "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz",
- "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==",
- "dev": true,
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/path-parse": {
- "version": "1.0.7",
- "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz",
- "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==",
- "dev": true
- },
- "node_modules/pbkdf2": {
- "version": "3.1.2",
- "resolved": "https://registry.npmjs.org/pbkdf2/-/pbkdf2-3.1.2.tgz",
- "integrity": "sha512-iuh7L6jA7JEGu2WxDwtQP1ddOpaJNC4KlDEFfdQajSGgGPNi4OyDc2R7QnbY2bR9QjBVGwgvTdNJZoE7RaxUMA==",
- "dependencies": {
- "create-hash": "^1.1.2",
- "create-hmac": "^1.1.4",
- "ripemd160": "^2.0.1",
- "safe-buffer": "^5.0.1",
- "sha.js": "^2.4.8"
- },
- "engines": {
- "node": ">=0.12"
- }
- },
- "node_modules/picocolors": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz",
- "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==",
- "dev": true
- },
- "node_modules/picomatch": {
- "version": "2.3.1",
- "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz",
- "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==",
- "dev": true,
- "engines": {
- "node": ">=8.6"
- },
- "funding": {
- "url": "https://github.com/sponsors/jonschlinkert"
- }
- },
- "node_modules/pkg-dir": {
- "version": "4.2.0",
- "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz",
- "integrity": "sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==",
- "dev": true,
- "dependencies": {
- "find-up": "^4.0.0"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/public-encrypt": {
- "version": "4.0.3",
- "resolved": "https://registry.npmjs.org/public-encrypt/-/public-encrypt-4.0.3.tgz",
- "integrity": "sha512-zVpa8oKZSz5bTMTFClc1fQOnyyEzpl5ozpi1B5YcvBrdohMjH2rfsBtyXcuNuwjsDIXmBYlF2N5FlJYhR29t8Q==",
- "dependencies": {
- "bn.js": "^4.1.0",
- "browserify-rsa": "^4.0.0",
- "create-hash": "^1.1.0",
- "parse-asn1": "^5.0.0",
- "randombytes": "^2.0.1",
- "safe-buffer": "^5.1.2"
- }
- },
- "node_modules/public-encrypt/node_modules/bn.js": {
- "version": "4.12.0",
- "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz",
- "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA=="
- },
- "node_modules/punycode": {
- "version": "2.1.1",
- "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz",
- "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==",
- "dev": true,
- "engines": {
- "node": ">=6"
- }
- },
- "node_modules/randombytes": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz",
- "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==",
- "dependencies": {
- "safe-buffer": "^5.1.0"
- }
- },
- "node_modules/randomfill": {
- "version": "1.0.4",
- "resolved": "https://registry.npmjs.org/randomfill/-/randomfill-1.0.4.tgz",
- "integrity": "sha512-87lcbR8+MhcWcUiQ+9e+Rwx8MyR2P7qnt15ynUlbm3TU/fjbgz4GsvfSUDTemtCCtVCqb4ZcEFlyPNTh9bBTLw==",
- "dependencies": {
- "randombytes": "^2.0.5",
- "safe-buffer": "^5.1.0"
- }
- },
- "node_modules/readable-stream": {
- "version": "3.6.0",
- "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz",
- "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==",
- "dependencies": {
- "inherits": "^2.0.3",
- "string_decoder": "^1.1.1",
- "util-deprecate": "^1.0.1"
- },
- "engines": {
- "node": ">= 6"
- }
- },
- "node_modules/rechoir": {
- "version": "0.7.1",
- "resolved": "https://registry.npmjs.org/rechoir/-/rechoir-0.7.1.tgz",
- "integrity": "sha512-/njmZ8s1wVeR6pjTZ+0nCnv8SpZNRMT2D1RLOJQESlYFDBvwpTA4KWJpZ+sBJ4+vhjILRcK7JIFdGCdxEAAitg==",
- "dev": true,
- "dependencies": {
- "resolve": "^1.9.0"
- },
- "engines": {
- "node": ">= 0.10"
- }
- },
- "node_modules/regenerate": {
- "version": "1.4.2",
- "resolved": "https://registry.npmjs.org/regenerate/-/regenerate-1.4.2.tgz",
- "integrity": "sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A==",
- "dev": true
- },
- "node_modules/regenerate-unicode-properties": {
- "version": "10.0.1",
- "resolved": "https://registry.npmjs.org/regenerate-unicode-properties/-/regenerate-unicode-properties-10.0.1.tgz",
- "integrity": "sha512-vn5DU6yg6h8hP/2OkQo3K7uVILvY4iu0oI4t3HFa81UPkhGJwkRwM10JEc3upjdhHjs/k8GJY1sRBhk5sr69Bw==",
- "dev": true,
- "dependencies": {
- "regenerate": "^1.4.2"
- },
- "engines": {
- "node": ">=4"
- }
- },
- "node_modules/regenerator-runtime": {
- "version": "0.13.9",
- "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz",
- "integrity": "sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA==",
- "dev": true
- },
- "node_modules/regenerator-transform": {
- "version": "0.15.0",
- "resolved": "https://registry.npmjs.org/regenerator-transform/-/regenerator-transform-0.15.0.tgz",
- "integrity": "sha512-LsrGtPmbYg19bcPHwdtmXwbW+TqNvtY4riE3P83foeHRroMbH6/2ddFBfab3t7kbzc7v7p4wbkIecHImqt0QNg==",
- "dev": true,
- "dependencies": {
- "@babel/runtime": "^7.8.4"
- }
- },
- "node_modules/regexpu-core": {
- "version": "5.0.1",
- "resolved": "https://registry.npmjs.org/regexpu-core/-/regexpu-core-5.0.1.tgz",
- "integrity": "sha512-CriEZlrKK9VJw/xQGJpQM5rY88BtuL8DM+AEwvcThHilbxiTAy8vq4iJnd2tqq8wLmjbGZzP7ZcKFjbGkmEFrw==",
- "dev": true,
- "dependencies": {
- "regenerate": "^1.4.2",
- "regenerate-unicode-properties": "^10.0.1",
- "regjsgen": "^0.6.0",
- "regjsparser": "^0.8.2",
- "unicode-match-property-ecmascript": "^2.0.0",
- "unicode-match-property-value-ecmascript": "^2.0.0"
- },
- "engines": {
- "node": ">=4"
- }
- },
- "node_modules/regjsgen": {
- "version": "0.6.0",
- "resolved": "https://registry.npmjs.org/regjsgen/-/regjsgen-0.6.0.tgz",
- "integrity": "sha512-ozE883Uigtqj3bx7OhL1KNbCzGyW2NQZPl6Hs09WTvCuZD5sTI4JY58bkbQWa/Y9hxIsvJ3M8Nbf7j54IqeZbA==",
- "dev": true
- },
- "node_modules/regjsparser": {
- "version": "0.8.4",
- "resolved": "https://registry.npmjs.org/regjsparser/-/regjsparser-0.8.4.tgz",
- "integrity": "sha512-J3LABycON/VNEu3abOviqGHuB/LOtOQj8SKmfP9anY5GfAVw/SPjwzSjxGjbZXIxbGfqTHtJw58C2Li/WkStmA==",
- "dev": true,
- "dependencies": {
- "jsesc": "~0.5.0"
- },
- "bin": {
- "regjsparser": "bin/parser"
- }
- },
- "node_modules/regjsparser/node_modules/jsesc": {
- "version": "0.5.0",
- "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-0.5.0.tgz",
- "integrity": "sha1-597mbjXW/Bb3EP6R1c9p9w8IkR0=",
- "dev": true,
- "bin": {
- "jsesc": "bin/jsesc"
- }
- },
- "node_modules/resolve": {
- "version": "1.22.0",
- "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.0.tgz",
- "integrity": "sha512-Hhtrw0nLeSrFQ7phPp4OOcVjLPIeMnRlr5mcnVuMe7M/7eBn98A3hmFRLoFo3DLZkivSYwhRUJTyPyWAk56WLw==",
- "dev": true,
- "dependencies": {
- "is-core-module": "^2.8.1",
- "path-parse": "^1.0.7",
- "supports-preserve-symlinks-flag": "^1.0.0"
- },
- "bin": {
- "resolve": "bin/resolve"
- },
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/resolve-cwd": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/resolve-cwd/-/resolve-cwd-3.0.0.tgz",
- "integrity": "sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg==",
- "dev": true,
- "dependencies": {
- "resolve-from": "^5.0.0"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/resolve-from": {
- "version": "5.0.0",
- "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz",
- "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==",
- "dev": true,
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/ripemd160": {
- "version": "2.0.2",
- "resolved": "https://registry.npmjs.org/ripemd160/-/ripemd160-2.0.2.tgz",
- "integrity": "sha512-ii4iagi25WusVoiC4B4lq7pbXfAp3D9v5CwfkY33vffw2+pkDjY1D8GaN7spsxvCSx8dkPqOZCEZyfxcmJG2IA==",
- "dependencies": {
- "hash-base": "^3.0.0",
- "inherits": "^2.0.1"
- }
- },
- "node_modules/rollup": {
- "version": "2.70.2",
- "resolved": "https://registry.npmjs.org/rollup/-/rollup-2.70.2.tgz",
- "integrity": "sha512-EitogNZnfku65I1DD5Mxe8JYRUCy0hkK5X84IlDtUs+O6JRMpRciXTzyCUuX11b5L5pvjH+OmFXiQ3XjabcXgg==",
- "dev": true,
- "bin": {
- "rollup": "dist/bin/rollup"
- },
- "engines": {
- "node": ">=10.0.0"
- },
- "optionalDependencies": {
- "fsevents": "~2.3.2"
- }
- },
- "node_modules/rollup-plugin-babel": {
- "version": "4.4.0",
- "resolved": "https://registry.npmjs.org/rollup-plugin-babel/-/rollup-plugin-babel-4.4.0.tgz",
- "integrity": "sha512-Lek/TYp1+7g7I+uMfJnnSJ7YWoD58ajo6Oarhlex7lvUce+RCKRuGRSgztDO3/MF/PuGKmUL5iTHKf208UNszw==",
- "deprecated": "This package has been deprecated and is no longer maintained. Please use @rollup/plugin-babel.",
- "dev": true,
- "dependencies": {
- "@babel/helper-module-imports": "^7.0.0",
- "rollup-pluginutils": "^2.8.1"
- },
- "peerDependencies": {
- "@babel/core": "7 || ^7.0.0-rc.2",
- "rollup": ">=0.60.0 <3"
- }
- },
- "node_modules/rollup-plugin-commonjs": {
- "version": "10.1.0",
- "resolved": "https://registry.npmjs.org/rollup-plugin-commonjs/-/rollup-plugin-commonjs-10.1.0.tgz",
- "integrity": "sha512-jlXbjZSQg8EIeAAvepNwhJj++qJWNJw1Cl0YnOqKtP5Djx+fFGkp3WRh+W0ASCaFG5w1jhmzDxgu3SJuVxPF4Q==",
- "deprecated": "This package has been deprecated and is no longer maintained. Please use @rollup/plugin-commonjs.",
- "dev": true,
- "dependencies": {
- "estree-walker": "^0.6.1",
- "is-reference": "^1.1.2",
- "magic-string": "^0.25.2",
- "resolve": "^1.11.0",
- "rollup-pluginutils": "^2.8.1"
- },
- "peerDependencies": {
- "rollup": ">=1.12.0"
- }
- },
- "node_modules/rollup-plugin-node-resolve": {
- "version": "5.2.0",
- "resolved": "https://registry.npmjs.org/rollup-plugin-node-resolve/-/rollup-plugin-node-resolve-5.2.0.tgz",
- "integrity": "sha512-jUlyaDXts7TW2CqQ4GaO5VJ4PwwaV8VUGA7+km3n6k6xtOEacf61u0VXwN80phY/evMcaS+9eIeJ9MOyDxt5Zw==",
- "deprecated": "This package has been deprecated and is no longer maintained. Please use @rollup/plugin-node-resolve.",
- "dev": true,
- "dependencies": {
- "@types/resolve": "0.0.8",
- "builtin-modules": "^3.1.0",
- "is-module": "^1.0.0",
- "resolve": "^1.11.1",
- "rollup-pluginutils": "^2.8.1"
- },
- "peerDependencies": {
- "rollup": ">=1.11.0"
- }
- },
- "node_modules/rollup-pluginutils": {
- "version": "2.8.2",
- "resolved": "https://registry.npmjs.org/rollup-pluginutils/-/rollup-pluginutils-2.8.2.tgz",
- "integrity": "sha512-EEp9NhnUkwY8aif6bxgovPHMoMoNr2FulJziTndpt5H9RdwC47GSGuII9XxpSdzVGM0GWrNPHV6ie1LTNJPaLQ==",
- "dev": true,
- "dependencies": {
- "estree-walker": "^0.6.1"
- }
- },
- "node_modules/safe-buffer": {
- "version": "5.1.2",
- "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz",
- "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g=="
- },
- "node_modules/safer-buffer": {
- "version": "2.1.2",
- "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
- "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="
- },
- "node_modules/schema-utils": {
- "version": "3.1.1",
- "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.1.1.tgz",
- "integrity": "sha512-Y5PQxS4ITlC+EahLuXaY86TXfR7Dc5lw294alXOq86JAHCihAIZfqv8nNCWvaEJvaC51uN9hbLGeV0cFBdH+Fw==",
- "dev": true,
- "dependencies": {
- "@types/json-schema": "^7.0.8",
- "ajv": "^6.12.5",
- "ajv-keywords": "^3.5.2"
- },
- "engines": {
- "node": ">= 10.13.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/webpack"
- }
- },
- "node_modules/semver": {
- "version": "6.3.0",
- "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz",
- "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==",
- "dev": true,
- "bin": {
- "semver": "bin/semver.js"
- }
- },
- "node_modules/serialize-javascript": {
- "version": "6.0.0",
- "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.0.tgz",
- "integrity": "sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag==",
- "dev": true,
- "dependencies": {
- "randombytes": "^2.1.0"
- }
- },
- "node_modules/sha.js": {
- "version": "2.4.11",
- "resolved": "https://registry.npmjs.org/sha.js/-/sha.js-2.4.11.tgz",
- "integrity": "sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ==",
- "dependencies": {
- "inherits": "^2.0.1",
- "safe-buffer": "^5.0.1"
- },
- "bin": {
- "sha.js": "bin.js"
- }
- },
- "node_modules/shallow-clone": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/shallow-clone/-/shallow-clone-3.0.1.tgz",
- "integrity": "sha512-/6KqX+GVUdqPuPPd2LxDDxzX6CAbjJehAAOKlNpqqUpAqPM6HeL8f+o3a+JsyGjn2lv0WY8UsTgUJjU9Ok55NA==",
- "dev": true,
- "dependencies": {
- "kind-of": "^6.0.2"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/shebang-command": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz",
- "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==",
- "dev": true,
- "dependencies": {
- "shebang-regex": "^3.0.0"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/shebang-regex": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz",
- "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==",
- "dev": true,
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/source-map": {
- "version": "0.5.7",
- "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz",
- "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=",
- "dev": true,
- "engines": {
- "node": ">=0.10.0"
- }
- },
- "node_modules/source-map-support": {
- "version": "0.5.21",
- "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz",
- "integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==",
- "dev": true,
- "dependencies": {
- "buffer-from": "^1.0.0",
- "source-map": "^0.6.0"
- }
- },
- "node_modules/source-map-support/node_modules/source-map": {
- "version": "0.6.1",
- "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
- "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==",
- "dev": true,
- "engines": {
- "node": ">=0.10.0"
- }
- },
- "node_modules/sourcemap-codec": {
- "version": "1.4.8",
- "resolved": "https://registry.npmjs.org/sourcemap-codec/-/sourcemap-codec-1.4.8.tgz",
- "integrity": "sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA==",
- "dev": true
- },
- "node_modules/string_decoder": {
- "version": "1.3.0",
- "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz",
- "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==",
- "dependencies": {
- "safe-buffer": "~5.2.0"
- }
- },
- "node_modules/string_decoder/node_modules/safe-buffer": {
- "version": "5.2.1",
- "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
- "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==",
- "funding": [
- {
- "type": "github",
- "url": "https://github.com/sponsors/feross"
- },
- {
- "type": "patreon",
- "url": "https://www.patreon.com/feross"
- },
- {
- "type": "consulting",
- "url": "https://feross.org/support"
- }
- ]
- },
- "node_modules/supports-color": {
- "version": "5.5.0",
- "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz",
- "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==",
- "dev": true,
- "dependencies": {
- "has-flag": "^3.0.0"
- },
- "engines": {
- "node": ">=4"
- }
- },
- "node_modules/supports-preserve-symlinks-flag": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz",
- "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==",
- "dev": true,
- "engines": {
- "node": ">= 0.4"
- },
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/tapable": {
- "version": "2.2.1",
- "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.2.1.tgz",
- "integrity": "sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ==",
- "dev": true,
- "engines": {
- "node": ">=6"
- }
- },
- "node_modules/terser": {
- "version": "5.13.0",
- "resolved": "https://registry.npmjs.org/terser/-/terser-5.13.0.tgz",
- "integrity": "sha512-sgQ99P+fRBM1jAYzN9RTnD/xEWx/7LZgYTCRgmYriSq1wxxqiQPJgXkkLBBuwySDWJ2PP0PnVQyuf4xLUuH4Ng==",
- "dev": true,
- "dependencies": {
- "acorn": "^8.5.0",
- "commander": "^2.20.0",
- "source-map": "~0.8.0-beta.0",
- "source-map-support": "~0.5.20"
- },
- "bin": {
- "terser": "bin/terser"
- },
- "engines": {
- "node": ">=10"
- }
- },
- "node_modules/terser-webpack-plugin": {
- "version": "5.3.1",
- "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.3.1.tgz",
- "integrity": "sha512-GvlZdT6wPQKbDNW/GDQzZFg/j4vKU96yl2q6mcUkzKOgW4gwf1Z8cZToUCrz31XHlPWH8MVb1r2tFtdDtTGJ7g==",
- "dev": true,
- "dependencies": {
- "jest-worker": "^27.4.5",
- "schema-utils": "^3.1.1",
- "serialize-javascript": "^6.0.0",
- "source-map": "^0.6.1",
- "terser": "^5.7.2"
- },
- "engines": {
- "node": ">= 10.13.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/webpack"
- },
- "peerDependencies": {
- "webpack": "^5.1.0"
- },
- "peerDependenciesMeta": {
- "@swc/core": {
- "optional": true
- },
- "esbuild": {
- "optional": true
- },
- "uglify-js": {
- "optional": true
- }
- }
- },
- "node_modules/terser-webpack-plugin/node_modules/source-map": {
- "version": "0.6.1",
- "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
- "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==",
- "dev": true,
- "engines": {
- "node": ">=0.10.0"
- }
- },
- "node_modules/terser/node_modules/commander": {
- "version": "2.20.3",
- "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz",
- "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==",
- "dev": true
- },
- "node_modules/terser/node_modules/source-map": {
- "version": "0.8.0-beta.0",
- "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.8.0-beta.0.tgz",
- "integrity": "sha512-2ymg6oRBpebeZi9UUNsgQ89bhx01TcTkmNTGnNO88imTmbSgy4nfujrgVEFKWpMTEGA11EDkTt7mqObTPdigIA==",
- "dev": true,
- "dependencies": {
- "whatwg-url": "^7.0.0"
- },
- "engines": {
- "node": ">= 8"
- }
- },
- "node_modules/to-fast-properties": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz",
- "integrity": "sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4=",
- "dev": true,
- "engines": {
- "node": ">=4"
- }
- },
- "node_modules/tr46": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/tr46/-/tr46-1.0.1.tgz",
- "integrity": "sha1-qLE/1r/SSJUZZ0zN5VujaTtwbQk=",
- "dev": true,
- "dependencies": {
- "punycode": "^2.1.0"
- }
- },
- "node_modules/type": {
- "version": "1.2.0",
- "resolved": "https://registry.npmjs.org/type/-/type-1.2.0.tgz",
- "integrity": "sha512-+5nt5AAniqsCnu2cEQQdpzCAh33kVx8n0VoFidKpB1dVVLAN/F+bgVOqOJqOnEnrhp222clB5p3vUlD+1QAnfg=="
- },
- "node_modules/typedarray-to-buffer": {
- "version": "3.1.5",
- "resolved": "https://registry.npmjs.org/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz",
- "integrity": "sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q==",
- "dependencies": {
- "is-typedarray": "^1.0.0"
- }
- },
- "node_modules/unicode-canonical-property-names-ecmascript": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-2.0.0.tgz",
- "integrity": "sha512-yY5PpDlfVIU5+y/BSCxAJRBIS1Zc2dDG3Ujq+sR0U+JjUevW2JhocOF+soROYDSaAezOzOKuyyixhD6mBknSmQ==",
- "dev": true,
- "engines": {
- "node": ">=4"
- }
- },
- "node_modules/unicode-match-property-ecmascript": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-2.0.0.tgz",
- "integrity": "sha512-5kaZCrbp5mmbz5ulBkDkbY0SsPOjKqVS35VpL9ulMPfSl0J0Xsm+9Evphv9CoIZFwre7aJoa94AY6seMKGVN5Q==",
- "dev": true,
- "dependencies": {
- "unicode-canonical-property-names-ecmascript": "^2.0.0",
- "unicode-property-aliases-ecmascript": "^2.0.0"
- },
- "engines": {
- "node": ">=4"
- }
- },
- "node_modules/unicode-match-property-value-ecmascript": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-2.0.0.tgz",
- "integrity": "sha512-7Yhkc0Ye+t4PNYzOGKedDhXbYIBe1XEQYQxOPyhcXNMJ0WCABqqj6ckydd6pWRZTHV4GuCPKdBAUiMc60tsKVw==",
- "dev": true,
- "engines": {
- "node": ">=4"
- }
- },
- "node_modules/unicode-property-aliases-ecmascript": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-2.0.0.tgz",
- "integrity": "sha512-5Zfuy9q/DFr4tfO7ZPeVXb1aPoeQSdeFMLpYuFebehDAhbuevLs5yxSZmIFN1tP5F9Wl4IpJrYojg85/zgyZHQ==",
- "dev": true,
- "engines": {
- "node": ">=4"
- }
- },
- "node_modules/uri-js": {
- "version": "4.4.1",
- "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz",
- "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==",
- "dev": true,
- "dependencies": {
- "punycode": "^2.1.0"
- }
- },
- "node_modules/utf-8-validate": {
- "version": "5.0.9",
- "resolved": "https://registry.npmjs.org/utf-8-validate/-/utf-8-validate-5.0.9.tgz",
- "integrity": "sha512-Yek7dAy0v3Kl0orwMlvi7TPtiCNrdfHNd7Gcc/pLq4BLXqfAmd0J7OWMizUQnTTJsyjKn02mU7anqwfmUP4J8Q==",
- "hasInstallScript": true,
- "dependencies": {
- "node-gyp-build": "^4.3.0"
- },
- "engines": {
- "node": ">=6.14.2"
- }
- },
- "node_modules/util-deprecate": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
- "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw=="
- },
- "node_modules/uuid": {
- "version": "8.3.2",
- "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
- "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==",
- "bin": {
- "uuid": "dist/bin/uuid"
- }
- },
- "node_modules/watchpack": {
- "version": "2.3.1",
- "resolved": "https://registry.npmjs.org/watchpack/-/watchpack-2.3.1.tgz",
- "integrity": "sha512-x0t0JuydIo8qCNctdDrn1OzH/qDzk2+rdCOC3YzumZ42fiMqmQ7T3xQurykYMhYfHaPHTp4ZxAx2NfUo1K6QaA==",
- "dev": true,
- "dependencies": {
- "glob-to-regexp": "^0.4.1",
- "graceful-fs": "^4.1.2"
- },
- "engines": {
- "node": ">=10.13.0"
- }
- },
- "node_modules/webidl-conversions": {
- "version": "4.0.2",
- "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-4.0.2.tgz",
- "integrity": "sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg==",
- "dev": true
- },
- "node_modules/webpack": {
- "version": "5.73.0",
- "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.73.0.tgz",
- "integrity": "sha512-svjudQRPPa0YiOYa2lM/Gacw0r6PvxptHj4FuEKQ2kX05ZLkjbVc5MnPs6its5j7IZljnIqSVo/OsY2X0IpHGA==",
- "dev": true,
- "dependencies": {
- "@types/eslint-scope": "^3.7.3",
- "@types/estree": "^0.0.51",
- "@webassemblyjs/ast": "1.11.1",
- "@webassemblyjs/wasm-edit": "1.11.1",
- "@webassemblyjs/wasm-parser": "1.11.1",
- "acorn": "^8.4.1",
- "acorn-import-assertions": "^1.7.6",
- "browserslist": "^4.14.5",
- "chrome-trace-event": "^1.0.2",
- "enhanced-resolve": "^5.9.3",
- "es-module-lexer": "^0.9.0",
- "eslint-scope": "5.1.1",
- "events": "^3.2.0",
- "glob-to-regexp": "^0.4.1",
- "graceful-fs": "^4.2.9",
- "json-parse-even-better-errors": "^2.3.1",
- "loader-runner": "^4.2.0",
- "mime-types": "^2.1.27",
- "neo-async": "^2.6.2",
- "schema-utils": "^3.1.0",
- "tapable": "^2.1.1",
- "terser-webpack-plugin": "^5.1.3",
- "watchpack": "^2.3.1",
- "webpack-sources": "^3.2.3"
- },
- "bin": {
- "webpack": "bin/webpack.js"
- },
- "engines": {
- "node": ">=10.13.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/webpack"
- },
- "peerDependenciesMeta": {
- "webpack-cli": {
- "optional": true
- }
- }
- },
- "node_modules/webpack-cli": {
- "version": "4.10.0",
- "resolved": "https://registry.npmjs.org/webpack-cli/-/webpack-cli-4.10.0.tgz",
- "integrity": "sha512-NLhDfH/h4O6UOy+0LSso42xvYypClINuMNBVVzX4vX98TmTaTUxwRbXdhucbFMd2qLaCTcLq/PdYrvi8onw90w==",
- "dev": true,
- "dependencies": {
- "@discoveryjs/json-ext": "^0.5.0",
- "@webpack-cli/configtest": "^1.2.0",
- "@webpack-cli/info": "^1.5.0",
- "@webpack-cli/serve": "^1.7.0",
- "colorette": "^2.0.14",
- "commander": "^7.0.0",
- "cross-spawn": "^7.0.3",
- "fastest-levenshtein": "^1.0.12",
- "import-local": "^3.0.2",
- "interpret": "^2.2.0",
- "rechoir": "^0.7.0",
- "webpack-merge": "^5.7.3"
- },
- "bin": {
- "webpack-cli": "bin/cli.js"
- },
- "engines": {
- "node": ">=10.13.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/webpack"
- },
- "peerDependencies": {
- "webpack": "4.x.x || 5.x.x"
- },
- "peerDependenciesMeta": {
- "@webpack-cli/generators": {
- "optional": true
- },
- "@webpack-cli/migrate": {
- "optional": true
- },
- "webpack-bundle-analyzer": {
- "optional": true
- },
- "webpack-dev-server": {
- "optional": true
- }
- }
- },
- "node_modules/webpack-cli/node_modules/commander": {
- "version": "7.2.0",
- "resolved": "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz",
- "integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==",
- "dev": true,
- "engines": {
- "node": ">= 10"
- }
- },
- "node_modules/webpack-merge": {
- "version": "5.8.0",
- "resolved": "https://registry.npmjs.org/webpack-merge/-/webpack-merge-5.8.0.tgz",
- "integrity": "sha512-/SaI7xY0831XwP6kzuwhKWVKDP9t1QY1h65lAFLbZqMPIuYcD9QAW4u9STIbU9kaJbPBB/geU/gLr1wDjOhQ+Q==",
- "dev": true,
- "dependencies": {
- "clone-deep": "^4.0.1",
- "wildcard": "^2.0.0"
- },
- "engines": {
- "node": ">=10.0.0"
- }
- },
- "node_modules/webpack-sources": {
- "version": "3.2.3",
- "resolved": "https://registry.npmjs.org/webpack-sources/-/webpack-sources-3.2.3.tgz",
- "integrity": "sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w==",
- "dev": true,
- "engines": {
- "node": ">=10.13.0"
- }
- },
- "node_modules/websocket": {
- "version": "1.0.34",
- "resolved": "https://registry.npmjs.org/websocket/-/websocket-1.0.34.tgz",
- "integrity": "sha512-PRDso2sGwF6kM75QykIesBijKSVceR6jL2G8NGYyq2XrItNC2P5/qL5XeR056GhA+Ly7JMFvJb9I312mJfmqnQ==",
- "dependencies": {
- "bufferutil": "^4.0.1",
- "debug": "^2.2.0",
- "es5-ext": "^0.10.50",
- "typedarray-to-buffer": "^3.1.5",
- "utf-8-validate": "^5.0.2",
- "yaeti": "^0.0.6"
- },
- "engines": {
- "node": ">=4.0.0"
- }
- },
- "node_modules/websocket/node_modules/debug": {
- "version": "2.6.9",
- "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
- "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==",
- "dependencies": {
- "ms": "2.0.0"
- }
- },
- "node_modules/websocket/node_modules/ms": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
- "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g="
- },
- "node_modules/whatwg-url": {
- "version": "7.1.0",
- "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-7.1.0.tgz",
- "integrity": "sha512-WUu7Rg1DroM7oQvGWfOiAK21n74Gg+T4elXEQYkOhtyLeWiJFoOGLXPKI/9gzIie9CtwVLm8wtw6YJdKyxSjeg==",
- "dev": true,
- "dependencies": {
- "lodash.sortby": "^4.7.0",
- "tr46": "^1.0.1",
- "webidl-conversions": "^4.0.2"
- }
- },
- "node_modules/which": {
- "version": "2.0.2",
- "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
- "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==",
- "dev": true,
- "dependencies": {
- "isexe": "^2.0.0"
- },
- "bin": {
- "node-which": "bin/node-which"
- },
- "engines": {
- "node": ">= 8"
- }
- },
- "node_modules/wildcard": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/wildcard/-/wildcard-2.0.0.tgz",
- "integrity": "sha512-JcKqAHLPxcdb9KM49dufGXn2x3ssnfjbcaQdLlfZsL9rH9wgDQjUtDxbo8NE0F6SFvydeu1VhZe7hZuHsB2/pw==",
- "dev": true
- },
- "node_modules/yaeti": {
- "version": "0.0.6",
- "resolved": "https://registry.npmjs.org/yaeti/-/yaeti-0.0.6.tgz",
- "integrity": "sha1-8m9ITXJoTPQr7ft2lwqhYI+/lXc=",
- "engines": {
- "node": ">=0.10.32"
- }
- }
- },
- "dependencies": {
- "@ampproject/remapping": {
- "version": "2.2.0",
- "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.2.0.tgz",
- "integrity": "sha512-qRmjj8nj9qmLTQXXmaR1cck3UXSRMPrbsLJAasZpF+t3riI71BXed5ebIOYwQntykeZuhjsdweEc9BxH5Jc26w==",
- "dev": true,
- "requires": {
- "@jridgewell/gen-mapping": "^0.1.0",
- "@jridgewell/trace-mapping": "^0.3.9"
- }
- },
- "@babel/code-frame": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.16.7.tgz",
- "integrity": "sha512-iAXqUn8IIeBTNd72xsFlgaXHkMBMt6y4HJp1tIaK465CWLT/fG1aqB7ykr95gHHmlBdGbFeWWfyB4NJJ0nmeIg==",
- "dev": true,
- "requires": {
- "@babel/highlight": "^7.16.7"
- }
- },
- "@babel/compat-data": {
- "version": "7.17.7",
- "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.17.7.tgz",
- "integrity": "sha512-p8pdE6j0a29TNGebNm7NzYZWB3xVZJBZ7XGs42uAKzQo8VQ3F0By/cQCtUEABwIqw5zo6WA4NbmxsfzADzMKnQ==",
- "dev": true
- },
- "@babel/core": {
- "version": "7.17.9",
- "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.17.9.tgz",
- "integrity": "sha512-5ug+SfZCpDAkVp9SFIZAzlW18rlzsOcJGaetCjkySnrXXDUw9AR8cDUm1iByTmdWM6yxX6/zycaV76w3YTF2gw==",
- "dev": true,
- "requires": {
- "@ampproject/remapping": "^2.1.0",
- "@babel/code-frame": "^7.16.7",
- "@babel/generator": "^7.17.9",
- "@babel/helper-compilation-targets": "^7.17.7",
- "@babel/helper-module-transforms": "^7.17.7",
- "@babel/helpers": "^7.17.9",
- "@babel/parser": "^7.17.9",
- "@babel/template": "^7.16.7",
- "@babel/traverse": "^7.17.9",
- "@babel/types": "^7.17.0",
- "convert-source-map": "^1.7.0",
- "debug": "^4.1.0",
- "gensync": "^1.0.0-beta.2",
- "json5": "^2.2.1",
- "semver": "^6.3.0"
- }
- },
- "@babel/generator": {
- "version": "7.17.9",
- "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.17.9.tgz",
- "integrity": "sha512-rAdDousTwxbIxbz5I7GEQ3lUip+xVCXooZNbsydCWs3xA7ZsYOv+CFRdzGxRX78BmQHu9B1Eso59AOZQOJDEdQ==",
- "dev": true,
- "requires": {
- "@babel/types": "^7.17.0",
- "jsesc": "^2.5.1",
- "source-map": "^0.5.0"
- }
- },
- "@babel/helper-annotate-as-pure": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.16.7.tgz",
- "integrity": "sha512-s6t2w/IPQVTAET1HitoowRGXooX8mCgtuP5195wD/QJPV6wYjpujCGF7JuMODVX2ZAJOf1GT6DT9MHEZvLOFSw==",
- "dev": true,
- "requires": {
- "@babel/types": "^7.16.7"
- }
- },
- "@babel/helper-builder-binary-assignment-operator-visitor": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.16.7.tgz",
- "integrity": "sha512-C6FdbRaxYjwVu/geKW4ZeQ0Q31AftgRcdSnZ5/jsH6BzCJbtvXvhpfkbkThYSuutZA7nCXpPR6AD9zd1dprMkA==",
- "dev": true,
- "requires": {
- "@babel/helper-explode-assignable-expression": "^7.16.7",
- "@babel/types": "^7.16.7"
- }
- },
- "@babel/helper-compilation-targets": {
- "version": "7.17.7",
- "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.17.7.tgz",
- "integrity": "sha512-UFzlz2jjd8kroj0hmCFV5zr+tQPi1dpC2cRsDV/3IEW8bJfCPrPpmcSN6ZS8RqIq4LXcmpipCQFPddyFA5Yc7w==",
- "dev": true,
- "requires": {
- "@babel/compat-data": "^7.17.7",
- "@babel/helper-validator-option": "^7.16.7",
- "browserslist": "^4.17.5",
- "semver": "^6.3.0"
- }
- },
- "@babel/helper-create-class-features-plugin": {
- "version": "7.17.9",
- "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.17.9.tgz",
- "integrity": "sha512-kUjip3gruz6AJKOq5i3nC6CoCEEF/oHH3cp6tOZhB+IyyyPyW0g1Gfsxn3mkk6S08pIA2y8GQh609v9G/5sHVQ==",
- "dev": true,
- "requires": {
- "@babel/helper-annotate-as-pure": "^7.16.7",
- "@babel/helper-environment-visitor": "^7.16.7",
- "@babel/helper-function-name": "^7.17.9",
- "@babel/helper-member-expression-to-functions": "^7.17.7",
- "@babel/helper-optimise-call-expression": "^7.16.7",
- "@babel/helper-replace-supers": "^7.16.7",
- "@babel/helper-split-export-declaration": "^7.16.7"
- }
- },
- "@babel/helper-create-regexp-features-plugin": {
- "version": "7.17.0",
- "resolved": "https://registry.npmjs.org/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.17.0.tgz",
- "integrity": "sha512-awO2So99wG6KnlE+TPs6rn83gCz5WlEePJDTnLEqbchMVrBeAujURVphRdigsk094VhvZehFoNOihSlcBjwsXA==",
- "dev": true,
- "requires": {
- "@babel/helper-annotate-as-pure": "^7.16.7",
- "regexpu-core": "^5.0.1"
- }
- },
- "@babel/helper-define-polyfill-provider": {
- "version": "0.3.1",
- "resolved": "https://registry.npmjs.org/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.3.1.tgz",
- "integrity": "sha512-J9hGMpJQmtWmj46B3kBHmL38UhJGhYX7eqkcq+2gsstyYt341HmPeWspihX43yVRA0mS+8GGk2Gckc7bY/HCmA==",
- "dev": true,
- "requires": {
- "@babel/helper-compilation-targets": "^7.13.0",
- "@babel/helper-module-imports": "^7.12.13",
- "@babel/helper-plugin-utils": "^7.13.0",
- "@babel/traverse": "^7.13.0",
- "debug": "^4.1.1",
- "lodash.debounce": "^4.0.8",
- "resolve": "^1.14.2",
- "semver": "^6.1.2"
- }
- },
- "@babel/helper-environment-visitor": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.16.7.tgz",
- "integrity": "sha512-SLLb0AAn6PkUeAfKJCCOl9e1R53pQlGAfc4y4XuMRZfqeMYLE0dM1LMhqbGAlGQY0lfw5/ohoYWAe9V1yibRag==",
- "dev": true,
- "requires": {
- "@babel/types": "^7.16.7"
- }
- },
- "@babel/helper-explode-assignable-expression": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/helper-explode-assignable-expression/-/helper-explode-assignable-expression-7.16.7.tgz",
- "integrity": "sha512-KyUenhWMC8VrxzkGP0Jizjo4/Zx+1nNZhgocs+gLzyZyB8SHidhoq9KK/8Ato4anhwsivfkBLftky7gvzbZMtQ==",
- "dev": true,
- "requires": {
- "@babel/types": "^7.16.7"
- }
- },
- "@babel/helper-function-name": {
- "version": "7.17.9",
- "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.17.9.tgz",
- "integrity": "sha512-7cRisGlVtiVqZ0MW0/yFB4atgpGLWEHUVYnb448hZK4x+vih0YO5UoS11XIYtZYqHd0dIPMdUSv8q5K4LdMnIg==",
- "dev": true,
- "requires": {
- "@babel/template": "^7.16.7",
- "@babel/types": "^7.17.0"
- }
- },
- "@babel/helper-hoist-variables": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.16.7.tgz",
- "integrity": "sha512-m04d/0Op34H5v7pbZw6pSKP7weA6lsMvfiIAMeIvkY/R4xQtBSMFEigu9QTZ2qB/9l22vsxtM8a+Q8CzD255fg==",
- "dev": true,
- "requires": {
- "@babel/types": "^7.16.7"
- }
- },
- "@babel/helper-member-expression-to-functions": {
- "version": "7.17.7",
- "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.17.7.tgz",
- "integrity": "sha512-thxXgnQ8qQ11W2wVUObIqDL4p148VMxkt5T/qpN5k2fboRyzFGFmKsTGViquyM5QHKUy48OZoca8kw4ajaDPyw==",
- "dev": true,
- "requires": {
- "@babel/types": "^7.17.0"
- }
- },
- "@babel/helper-module-imports": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.16.7.tgz",
- "integrity": "sha512-LVtS6TqjJHFc+nYeITRo6VLXve70xmq7wPhWTqDJusJEgGmkAACWwMiTNrvfoQo6hEhFwAIixNkvB0jPXDL8Wg==",
- "dev": true,
- "requires": {
- "@babel/types": "^7.16.7"
- }
- },
- "@babel/helper-module-transforms": {
- "version": "7.17.7",
- "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.17.7.tgz",
- "integrity": "sha512-VmZD99F3gNTYB7fJRDTi+u6l/zxY0BE6OIxPSU7a50s6ZUQkHwSDmV92FfM+oCG0pZRVojGYhkR8I0OGeCVREw==",
- "dev": true,
- "requires": {
- "@babel/helper-environment-visitor": "^7.16.7",
- "@babel/helper-module-imports": "^7.16.7",
- "@babel/helper-simple-access": "^7.17.7",
- "@babel/helper-split-export-declaration": "^7.16.7",
- "@babel/helper-validator-identifier": "^7.16.7",
- "@babel/template": "^7.16.7",
- "@babel/traverse": "^7.17.3",
- "@babel/types": "^7.17.0"
- }
- },
- "@babel/helper-optimise-call-expression": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.16.7.tgz",
- "integrity": "sha512-EtgBhg7rd/JcnpZFXpBy0ze1YRfdm7BnBX4uKMBd3ixa3RGAE002JZB66FJyNH7g0F38U05pXmA5P8cBh7z+1w==",
- "dev": true,
- "requires": {
- "@babel/types": "^7.16.7"
- }
- },
- "@babel/helper-plugin-utils": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.16.7.tgz",
- "integrity": "sha512-Qg3Nk7ZxpgMrsox6HreY1ZNKdBq7K72tDSliA6dCl5f007jR4ne8iD5UzuNnCJH2xBf2BEEVGr+/OL6Gdp7RxA==",
- "dev": true
- },
- "@babel/helper-remap-async-to-generator": {
- "version": "7.16.8",
- "resolved": "https://registry.npmjs.org/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.16.8.tgz",
- "integrity": "sha512-fm0gH7Flb8H51LqJHy3HJ3wnE1+qtYR2A99K06ahwrawLdOFsCEWjZOrYricXJHoPSudNKxrMBUPEIPxiIIvBw==",
- "dev": true,
- "requires": {
- "@babel/helper-annotate-as-pure": "^7.16.7",
- "@babel/helper-wrap-function": "^7.16.8",
- "@babel/types": "^7.16.8"
- }
- },
- "@babel/helper-replace-supers": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.16.7.tgz",
- "integrity": "sha512-y9vsWilTNaVnVh6xiJfABzsNpgDPKev9HnAgz6Gb1p6UUwf9NepdlsV7VXGCftJM+jqD5f7JIEubcpLjZj5dBw==",
- "dev": true,
- "requires": {
- "@babel/helper-environment-visitor": "^7.16.7",
- "@babel/helper-member-expression-to-functions": "^7.16.7",
- "@babel/helper-optimise-call-expression": "^7.16.7",
- "@babel/traverse": "^7.16.7",
- "@babel/types": "^7.16.7"
- }
- },
- "@babel/helper-simple-access": {
- "version": "7.17.7",
- "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.17.7.tgz",
- "integrity": "sha512-txyMCGroZ96i+Pxr3Je3lzEJjqwaRC9buMUgtomcrLe5Nd0+fk1h0LLA+ixUF5OW7AhHuQ7Es1WcQJZmZsz2XA==",
- "dev": true,
- "requires": {
- "@babel/types": "^7.17.0"
- }
- },
- "@babel/helper-skip-transparent-expression-wrappers": {
- "version": "7.16.0",
- "resolved": "https://registry.npmjs.org/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.16.0.tgz",
- "integrity": "sha512-+il1gTy0oHwUsBQZyJvukbB4vPMdcYBrFHa0Uc4AizLxbq6BOYC51Rv4tWocX9BLBDLZ4kc6qUFpQ6HRgL+3zw==",
- "dev": true,
- "requires": {
- "@babel/types": "^7.16.0"
- }
- },
- "@babel/helper-split-export-declaration": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.16.7.tgz",
- "integrity": "sha512-xbWoy/PFoxSWazIToT9Sif+jJTlrMcndIsaOKvTA6u7QEo7ilkRZpjew18/W3c7nm8fXdUDXh02VXTbZ0pGDNw==",
- "dev": true,
- "requires": {
- "@babel/types": "^7.16.7"
- }
- },
- "@babel/helper-validator-identifier": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.16.7.tgz",
- "integrity": "sha512-hsEnFemeiW4D08A5gUAZxLBTXpZ39P+a+DGDsHw1yxqyQ/jzFEnxf5uTEGp+3bzAbNOxU1paTgYS4ECU/IgfDw==",
- "dev": true
- },
- "@babel/helper-validator-option": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.16.7.tgz",
- "integrity": "sha512-TRtenOuRUVo9oIQGPC5G9DgK4743cdxvtOw0weQNpZXaS16SCBi5MNjZF8vba3ETURjZpTbVn7Vvcf2eAwFozQ==",
- "dev": true
- },
- "@babel/helper-wrap-function": {
- "version": "7.16.8",
- "resolved": "https://registry.npmjs.org/@babel/helper-wrap-function/-/helper-wrap-function-7.16.8.tgz",
- "integrity": "sha512-8RpyRVIAW1RcDDGTA+GpPAwV22wXCfKOoM9bet6TLkGIFTkRQSkH1nMQ5Yet4MpoXe1ZwHPVtNasc2w0uZMqnw==",
- "dev": true,
- "requires": {
- "@babel/helper-function-name": "^7.16.7",
- "@babel/template": "^7.16.7",
- "@babel/traverse": "^7.16.8",
- "@babel/types": "^7.16.8"
- }
- },
- "@babel/helpers": {
- "version": "7.17.9",
- "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.17.9.tgz",
- "integrity": "sha512-cPCt915ShDWUEzEp3+UNRktO2n6v49l5RSnG9M5pS24hA+2FAc5si+Pn1i4VVbQQ+jh+bIZhPFQOJOzbrOYY1Q==",
- "dev": true,
- "requires": {
- "@babel/template": "^7.16.7",
- "@babel/traverse": "^7.17.9",
- "@babel/types": "^7.17.0"
- }
- },
- "@babel/highlight": {
- "version": "7.17.9",
- "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.17.9.tgz",
- "integrity": "sha512-J9PfEKCbFIv2X5bjTMiZu6Vf341N05QIY+d6FvVKynkG1S7G0j3I0QoRtWIrXhZ+/Nlb5Q0MzqL7TokEJ5BNHg==",
- "dev": true,
- "requires": {
- "@babel/helper-validator-identifier": "^7.16.7",
- "chalk": "^2.0.0",
- "js-tokens": "^4.0.0"
- }
- },
- "@babel/parser": {
- "version": "7.17.9",
- "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.17.9.tgz",
- "integrity": "sha512-vqUSBLP8dQHFPdPi9bc5GK9vRkYHJ49fsZdtoJ8EQ8ibpwk5rPKfvNIwChB0KVXcIjcepEBBd2VHC5r9Gy8ueg==",
- "dev": true
- },
- "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.16.7.tgz",
- "integrity": "sha512-anv/DObl7waiGEnC24O9zqL0pSuI9hljihqiDuFHC8d7/bjr/4RLGPWuc8rYOff/QPzbEPSkzG8wGG9aDuhHRg==",
- "dev": true,
- "requires": {
- "@babel/helper-plugin-utils": "^7.16.7"
- }
- },
- "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.16.7.tgz",
- "integrity": "sha512-di8vUHRdf+4aJ7ltXhaDbPoszdkh59AQtJM5soLsuHpQJdFQZOA4uGj0V2u/CZ8bJ/u8ULDL5yq6FO/bCXnKHw==",
- "dev": true,
- "requires": {
- "@babel/helper-plugin-utils": "^7.16.7",
- "@babel/helper-skip-transparent-expression-wrappers": "^7.16.0",
- "@babel/plugin-proposal-optional-chaining": "^7.16.7"
- }
- },
- "@babel/plugin-proposal-async-generator-functions": {
- "version": "7.16.8",
- "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.16.8.tgz",
- "integrity": "sha512-71YHIvMuiuqWJQkebWJtdhQTfd4Q4mF76q2IX37uZPkG9+olBxsX+rH1vkhFto4UeJZ9dPY2s+mDvhDm1u2BGQ==",
- "dev": true,
- "requires": {
- "@babel/helper-plugin-utils": "^7.16.7",
- "@babel/helper-remap-async-to-generator": "^7.16.8",
- "@babel/plugin-syntax-async-generators": "^7.8.4"
- }
- },
- "@babel/plugin-proposal-class-properties": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.16.7.tgz",
- "integrity": "sha512-IobU0Xme31ewjYOShSIqd/ZGM/r/cuOz2z0MDbNrhF5FW+ZVgi0f2lyeoj9KFPDOAqsYxmLWZte1WOwlvY9aww==",
- "dev": true,
- "requires": {
- "@babel/helper-create-class-features-plugin": "^7.16.7",
- "@babel/helper-plugin-utils": "^7.16.7"
- }
- },
- "@babel/plugin-proposal-class-static-block": {
- "version": "7.17.6",
- "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-class-static-block/-/plugin-proposal-class-static-block-7.17.6.tgz",
- "integrity": "sha512-X/tididvL2zbs7jZCeeRJ8167U/+Ac135AM6jCAx6gYXDUviZV5Ku9UDvWS2NCuWlFjIRXklYhwo6HhAC7ETnA==",
- "dev": true,
- "requires": {
- "@babel/helper-create-class-features-plugin": "^7.17.6",
- "@babel/helper-plugin-utils": "^7.16.7",
- "@babel/plugin-syntax-class-static-block": "^7.14.5"
- }
- },
- "@babel/plugin-proposal-dynamic-import": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-dynamic-import/-/plugin-proposal-dynamic-import-7.16.7.tgz",
- "integrity": "sha512-I8SW9Ho3/8DRSdmDdH3gORdyUuYnk1m4cMxUAdu5oy4n3OfN8flDEH+d60iG7dUfi0KkYwSvoalHzzdRzpWHTg==",
- "dev": true,
- "requires": {
- "@babel/helper-plugin-utils": "^7.16.7",
- "@babel/plugin-syntax-dynamic-import": "^7.8.3"
- }
- },
- "@babel/plugin-proposal-export-namespace-from": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-export-namespace-from/-/plugin-proposal-export-namespace-from-7.16.7.tgz",
- "integrity": "sha512-ZxdtqDXLRGBL64ocZcs7ovt71L3jhC1RGSyR996svrCi3PYqHNkb3SwPJCs8RIzD86s+WPpt2S73+EHCGO+NUA==",
- "dev": true,
- "requires": {
- "@babel/helper-plugin-utils": "^7.16.7",
- "@babel/plugin-syntax-export-namespace-from": "^7.8.3"
- }
- },
- "@babel/plugin-proposal-json-strings": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-json-strings/-/plugin-proposal-json-strings-7.16.7.tgz",
- "integrity": "sha512-lNZ3EEggsGY78JavgbHsK9u5P3pQaW7k4axlgFLYkMd7UBsiNahCITShLjNQschPyjtO6dADrL24757IdhBrsQ==",
- "dev": true,
- "requires": {
- "@babel/helper-plugin-utils": "^7.16.7",
- "@babel/plugin-syntax-json-strings": "^7.8.3"
- }
- },
- "@babel/plugin-proposal-logical-assignment-operators": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-logical-assignment-operators/-/plugin-proposal-logical-assignment-operators-7.16.7.tgz",
- "integrity": "sha512-K3XzyZJGQCr00+EtYtrDjmwX7o7PLK6U9bi1nCwkQioRFVUv6dJoxbQjtWVtP+bCPy82bONBKG8NPyQ4+i6yjg==",
- "dev": true,
- "requires": {
- "@babel/helper-plugin-utils": "^7.16.7",
- "@babel/plugin-syntax-logical-assignment-operators": "^7.10.4"
- }
- },
- "@babel/plugin-proposal-nullish-coalescing-operator": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-nullish-coalescing-operator/-/plugin-proposal-nullish-coalescing-operator-7.16.7.tgz",
- "integrity": "sha512-aUOrYU3EVtjf62jQrCj63pYZ7k6vns2h/DQvHPWGmsJRYzWXZ6/AsfgpiRy6XiuIDADhJzP2Q9MwSMKauBQ+UQ==",
- "dev": true,
- "requires": {
- "@babel/helper-plugin-utils": "^7.16.7",
- "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3"
- }
- },
- "@babel/plugin-proposal-numeric-separator": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-numeric-separator/-/plugin-proposal-numeric-separator-7.16.7.tgz",
- "integrity": "sha512-vQgPMknOIgiuVqbokToyXbkY/OmmjAzr/0lhSIbG/KmnzXPGwW/AdhdKpi+O4X/VkWiWjnkKOBiqJrTaC98VKw==",
- "dev": true,
- "requires": {
- "@babel/helper-plugin-utils": "^7.16.7",
- "@babel/plugin-syntax-numeric-separator": "^7.10.4"
- }
- },
- "@babel/plugin-proposal-object-rest-spread": {
- "version": "7.17.3",
- "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.17.3.tgz",
- "integrity": "sha512-yuL5iQA/TbZn+RGAfxQXfi7CNLmKi1f8zInn4IgobuCWcAb7i+zj4TYzQ9l8cEzVyJ89PDGuqxK1xZpUDISesw==",
- "dev": true,
- "requires": {
- "@babel/compat-data": "^7.17.0",
- "@babel/helper-compilation-targets": "^7.16.7",
- "@babel/helper-plugin-utils": "^7.16.7",
- "@babel/plugin-syntax-object-rest-spread": "^7.8.3",
- "@babel/plugin-transform-parameters": "^7.16.7"
- }
- },
- "@babel/plugin-proposal-optional-catch-binding": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.16.7.tgz",
- "integrity": "sha512-eMOH/L4OvWSZAE1VkHbr1vckLG1WUcHGJSLqqQwl2GaUqG6QjddvrOaTUMNYiv77H5IKPMZ9U9P7EaHwvAShfA==",
- "dev": true,
- "requires": {
- "@babel/helper-plugin-utils": "^7.16.7",
- "@babel/plugin-syntax-optional-catch-binding": "^7.8.3"
- }
- },
- "@babel/plugin-proposal-optional-chaining": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-optional-chaining/-/plugin-proposal-optional-chaining-7.16.7.tgz",
- "integrity": "sha512-eC3xy+ZrUcBtP7x+sq62Q/HYd674pPTb/77XZMb5wbDPGWIdUbSr4Agr052+zaUPSb+gGRnjxXfKFvx5iMJ+DA==",
- "dev": true,
- "requires": {
- "@babel/helper-plugin-utils": "^7.16.7",
- "@babel/helper-skip-transparent-expression-wrappers": "^7.16.0",
- "@babel/plugin-syntax-optional-chaining": "^7.8.3"
- }
- },
- "@babel/plugin-proposal-private-methods": {
- "version": "7.16.11",
- "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-private-methods/-/plugin-proposal-private-methods-7.16.11.tgz",
- "integrity": "sha512-F/2uAkPlXDr8+BHpZvo19w3hLFKge+k75XUprE6jaqKxjGkSYcK+4c+bup5PdW/7W/Rpjwql7FTVEDW+fRAQsw==",
- "dev": true,
- "requires": {
- "@babel/helper-create-class-features-plugin": "^7.16.10",
- "@babel/helper-plugin-utils": "^7.16.7"
- }
- },
- "@babel/plugin-proposal-private-property-in-object": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-private-property-in-object/-/plugin-proposal-private-property-in-object-7.16.7.tgz",
- "integrity": "sha512-rMQkjcOFbm+ufe3bTZLyOfsOUOxyvLXZJCTARhJr+8UMSoZmqTe1K1BgkFcrW37rAchWg57yI69ORxiWvUINuQ==",
- "dev": true,
- "requires": {
- "@babel/helper-annotate-as-pure": "^7.16.7",
- "@babel/helper-create-class-features-plugin": "^7.16.7",
- "@babel/helper-plugin-utils": "^7.16.7",
- "@babel/plugin-syntax-private-property-in-object": "^7.14.5"
- }
- },
- "@babel/plugin-proposal-unicode-property-regex": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.16.7.tgz",
- "integrity": "sha512-QRK0YI/40VLhNVGIjRNAAQkEHws0cswSdFFjpFyt943YmJIU1da9uW63Iu6NFV6CxTZW5eTDCrwZUstBWgp/Rg==",
- "dev": true,
- "requires": {
- "@babel/helper-create-regexp-features-plugin": "^7.16.7",
- "@babel/helper-plugin-utils": "^7.16.7"
- }
- },
- "@babel/plugin-syntax-async-generators": {
- "version": "7.8.4",
- "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz",
- "integrity": "sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw==",
- "dev": true,
- "requires": {
- "@babel/helper-plugin-utils": "^7.8.0"
- }
- },
- "@babel/plugin-syntax-class-properties": {
- "version": "7.12.13",
- "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz",
- "integrity": "sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA==",
- "dev": true,
- "requires": {
- "@babel/helper-plugin-utils": "^7.12.13"
- }
- },
- "@babel/plugin-syntax-class-static-block": {
- "version": "7.14.5",
- "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.14.5.tgz",
- "integrity": "sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw==",
- "dev": true,
- "requires": {
- "@babel/helper-plugin-utils": "^7.14.5"
- }
- },
- "@babel/plugin-syntax-dynamic-import": {
- "version": "7.8.3",
- "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.8.3.tgz",
- "integrity": "sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ==",
- "dev": true,
- "requires": {
- "@babel/helper-plugin-utils": "^7.8.0"
- }
- },
- "@babel/plugin-syntax-export-namespace-from": {
- "version": "7.8.3",
- "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-export-namespace-from/-/plugin-syntax-export-namespace-from-7.8.3.tgz",
- "integrity": "sha512-MXf5laXo6c1IbEbegDmzGPwGNTsHZmEy6QGznu5Sh2UCWvueywb2ee+CCE4zQiZstxU9BMoQO9i6zUFSY0Kj0Q==",
- "dev": true,
- "requires": {
- "@babel/helper-plugin-utils": "^7.8.3"
- }
- },
- "@babel/plugin-syntax-json-strings": {
- "version": "7.8.3",
- "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz",
- "integrity": "sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA==",
- "dev": true,
- "requires": {
- "@babel/helper-plugin-utils": "^7.8.0"
- }
- },
- "@babel/plugin-syntax-logical-assignment-operators": {
- "version": "7.10.4",
- "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz",
- "integrity": "sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig==",
- "dev": true,
- "requires": {
- "@babel/helper-plugin-utils": "^7.10.4"
- }
- },
- "@babel/plugin-syntax-nullish-coalescing-operator": {
- "version": "7.8.3",
- "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz",
- "integrity": "sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ==",
- "dev": true,
- "requires": {
- "@babel/helper-plugin-utils": "^7.8.0"
- }
- },
- "@babel/plugin-syntax-numeric-separator": {
- "version": "7.10.4",
- "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz",
- "integrity": "sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug==",
- "dev": true,
- "requires": {
- "@babel/helper-plugin-utils": "^7.10.4"
- }
- },
- "@babel/plugin-syntax-object-rest-spread": {
- "version": "7.8.3",
- "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz",
- "integrity": "sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA==",
- "dev": true,
- "requires": {
- "@babel/helper-plugin-utils": "^7.8.0"
- }
- },
- "@babel/plugin-syntax-optional-catch-binding": {
- "version": "7.8.3",
- "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz",
- "integrity": "sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q==",
- "dev": true,
- "requires": {
- "@babel/helper-plugin-utils": "^7.8.0"
- }
- },
- "@babel/plugin-syntax-optional-chaining": {
- "version": "7.8.3",
- "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz",
- "integrity": "sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg==",
- "dev": true,
- "requires": {
- "@babel/helper-plugin-utils": "^7.8.0"
- }
- },
- "@babel/plugin-syntax-private-property-in-object": {
- "version": "7.14.5",
- "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.5.tgz",
- "integrity": "sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg==",
- "dev": true,
- "requires": {
- "@babel/helper-plugin-utils": "^7.14.5"
- }
- },
- "@babel/plugin-syntax-top-level-await": {
- "version": "7.14.5",
- "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz",
- "integrity": "sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw==",
- "dev": true,
- "requires": {
- "@babel/helper-plugin-utils": "^7.14.5"
- }
- },
- "@babel/plugin-transform-arrow-functions": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.16.7.tgz",
- "integrity": "sha512-9ffkFFMbvzTvv+7dTp/66xvZAWASuPD5Tl9LK3Z9vhOmANo6j94rik+5YMBt4CwHVMWLWpMsriIc2zsa3WW3xQ==",
- "dev": true,
- "requires": {
- "@babel/helper-plugin-utils": "^7.16.7"
- }
- },
- "@babel/plugin-transform-async-to-generator": {
- "version": "7.16.8",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.16.8.tgz",
- "integrity": "sha512-MtmUmTJQHCnyJVrScNzNlofQJ3dLFuobYn3mwOTKHnSCMtbNsqvF71GQmJfFjdrXSsAA7iysFmYWw4bXZ20hOg==",
- "dev": true,
- "requires": {
- "@babel/helper-module-imports": "^7.16.7",
- "@babel/helper-plugin-utils": "^7.16.7",
- "@babel/helper-remap-async-to-generator": "^7.16.8"
- }
- },
- "@babel/plugin-transform-block-scoped-functions": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.16.7.tgz",
- "integrity": "sha512-JUuzlzmF40Z9cXyytcbZEZKckgrQzChbQJw/5PuEHYeqzCsvebDx0K0jWnIIVcmmDOAVctCgnYs0pMcrYj2zJg==",
- "dev": true,
- "requires": {
- "@babel/helper-plugin-utils": "^7.16.7"
- }
- },
- "@babel/plugin-transform-block-scoping": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.16.7.tgz",
- "integrity": "sha512-ObZev2nxVAYA4bhyusELdo9hb3H+A56bxH3FZMbEImZFiEDYVHXQSJ1hQKFlDnlt8G9bBrCZ5ZpURZUrV4G5qQ==",
- "dev": true,
- "requires": {
- "@babel/helper-plugin-utils": "^7.16.7"
- }
- },
- "@babel/plugin-transform-classes": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-classes/-/plugin-transform-classes-7.16.7.tgz",
- "integrity": "sha512-WY7og38SFAGYRe64BrjKf8OrE6ulEHtr5jEYaZMwox9KebgqPi67Zqz8K53EKk1fFEJgm96r32rkKZ3qA2nCWQ==",
- "dev": true,
- "requires": {
- "@babel/helper-annotate-as-pure": "^7.16.7",
- "@babel/helper-environment-visitor": "^7.16.7",
- "@babel/helper-function-name": "^7.16.7",
- "@babel/helper-optimise-call-expression": "^7.16.7",
- "@babel/helper-plugin-utils": "^7.16.7",
- "@babel/helper-replace-supers": "^7.16.7",
- "@babel/helper-split-export-declaration": "^7.16.7",
- "globals": "^11.1.0"
- }
- },
- "@babel/plugin-transform-computed-properties": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.16.7.tgz",
- "integrity": "sha512-gN72G9bcmenVILj//sv1zLNaPyYcOzUho2lIJBMh/iakJ9ygCo/hEF9cpGb61SCMEDxbbyBoVQxrt+bWKu5KGw==",
- "dev": true,
- "requires": {
- "@babel/helper-plugin-utils": "^7.16.7"
- }
- },
- "@babel/plugin-transform-destructuring": {
- "version": "7.17.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.17.7.tgz",
- "integrity": "sha512-XVh0r5yq9sLR4vZ6eVZe8FKfIcSgaTBxVBRSYokRj2qksf6QerYnTxz9/GTuKTH/n/HwLP7t6gtlybHetJ/6hQ==",
- "dev": true,
- "requires": {
- "@babel/helper-plugin-utils": "^7.16.7"
- }
- },
- "@babel/plugin-transform-dotall-regex": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.16.7.tgz",
- "integrity": "sha512-Lyttaao2SjZF6Pf4vk1dVKv8YypMpomAbygW+mU5cYP3S5cWTfCJjG8xV6CFdzGFlfWK81IjL9viiTvpb6G7gQ==",
- "dev": true,
- "requires": {
- "@babel/helper-create-regexp-features-plugin": "^7.16.7",
- "@babel/helper-plugin-utils": "^7.16.7"
- }
- },
- "@babel/plugin-transform-duplicate-keys": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.16.7.tgz",
- "integrity": "sha512-03DvpbRfvWIXyK0/6QiR1KMTWeT6OcQ7tbhjrXyFS02kjuX/mu5Bvnh5SDSWHxyawit2g5aWhKwI86EE7GUnTw==",
- "dev": true,
- "requires": {
- "@babel/helper-plugin-utils": "^7.16.7"
- }
- },
- "@babel/plugin-transform-exponentiation-operator": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.16.7.tgz",
- "integrity": "sha512-8UYLSlyLgRixQvlYH3J2ekXFHDFLQutdy7FfFAMm3CPZ6q9wHCwnUyiXpQCe3gVVnQlHc5nsuiEVziteRNTXEA==",
- "dev": true,
- "requires": {
- "@babel/helper-builder-binary-assignment-operator-visitor": "^7.16.7",
- "@babel/helper-plugin-utils": "^7.16.7"
- }
- },
- "@babel/plugin-transform-for-of": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.16.7.tgz",
- "integrity": "sha512-/QZm9W92Ptpw7sjI9Nx1mbcsWz33+l8kuMIQnDwgQBG5s3fAfQvkRjQ7NqXhtNcKOnPkdICmUHyCaWW06HCsqg==",
- "dev": true,
- "requires": {
- "@babel/helper-plugin-utils": "^7.16.7"
- }
- },
- "@babel/plugin-transform-function-name": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.16.7.tgz",
- "integrity": "sha512-SU/C68YVwTRxqWj5kgsbKINakGag0KTgq9f2iZEXdStoAbOzLHEBRYzImmA6yFo8YZhJVflvXmIHUO7GWHmxxA==",
- "dev": true,
- "requires": {
- "@babel/helper-compilation-targets": "^7.16.7",
- "@babel/helper-function-name": "^7.16.7",
- "@babel/helper-plugin-utils": "^7.16.7"
- }
- },
- "@babel/plugin-transform-literals": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-literals/-/plugin-transform-literals-7.16.7.tgz",
- "integrity": "sha512-6tH8RTpTWI0s2sV6uq3e/C9wPo4PTqqZps4uF0kzQ9/xPLFQtipynvmT1g/dOfEJ+0EQsHhkQ/zyRId8J2b8zQ==",
- "dev": true,
- "requires": {
- "@babel/helper-plugin-utils": "^7.16.7"
- }
- },
- "@babel/plugin-transform-member-expression-literals": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.16.7.tgz",
- "integrity": "sha512-mBruRMbktKQwbxaJof32LT9KLy2f3gH+27a5XSuXo6h7R3vqltl0PgZ80C8ZMKw98Bf8bqt6BEVi3svOh2PzMw==",
- "dev": true,
- "requires": {
- "@babel/helper-plugin-utils": "^7.16.7"
- }
- },
- "@babel/plugin-transform-modules-amd": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.16.7.tgz",
- "integrity": "sha512-KaaEtgBL7FKYwjJ/teH63oAmE3lP34N3kshz8mm4VMAw7U3PxjVwwUmxEFksbgsNUaO3wId9R2AVQYSEGRa2+g==",
- "dev": true,
- "requires": {
- "@babel/helper-module-transforms": "^7.16.7",
- "@babel/helper-plugin-utils": "^7.16.7",
- "babel-plugin-dynamic-import-node": "^2.3.3"
- }
- },
- "@babel/plugin-transform-modules-commonjs": {
- "version": "7.17.9",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.17.9.tgz",
- "integrity": "sha512-2TBFd/r2I6VlYn0YRTz2JdazS+FoUuQ2rIFHoAxtyP/0G3D82SBLaRq9rnUkpqlLg03Byfl/+M32mpxjO6KaPw==",
- "dev": true,
- "requires": {
- "@babel/helper-module-transforms": "^7.17.7",
- "@babel/helper-plugin-utils": "^7.16.7",
- "@babel/helper-simple-access": "^7.17.7",
- "babel-plugin-dynamic-import-node": "^2.3.3"
- }
- },
- "@babel/plugin-transform-modules-systemjs": {
- "version": "7.17.8",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.17.8.tgz",
- "integrity": "sha512-39reIkMTUVagzgA5x88zDYXPCMT6lcaRKs1+S9K6NKBPErbgO/w/kP8GlNQTC87b412ZTlmNgr3k2JrWgHH+Bw==",
- "dev": true,
- "requires": {
- "@babel/helper-hoist-variables": "^7.16.7",
- "@babel/helper-module-transforms": "^7.17.7",
- "@babel/helper-plugin-utils": "^7.16.7",
- "@babel/helper-validator-identifier": "^7.16.7",
- "babel-plugin-dynamic-import-node": "^2.3.3"
- }
- },
- "@babel/plugin-transform-modules-umd": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.16.7.tgz",
- "integrity": "sha512-EMh7uolsC8O4xhudF2F6wedbSHm1HHZ0C6aJ7K67zcDNidMzVcxWdGr+htW9n21klm+bOn+Rx4CBsAntZd3rEQ==",
- "dev": true,
- "requires": {
- "@babel/helper-module-transforms": "^7.16.7",
- "@babel/helper-plugin-utils": "^7.16.7"
- }
- },
- "@babel/plugin-transform-named-capturing-groups-regex": {
- "version": "7.16.8",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.16.8.tgz",
- "integrity": "sha512-j3Jw+n5PvpmhRR+mrgIh04puSANCk/T/UA3m3P1MjJkhlK906+ApHhDIqBQDdOgL/r1UYpz4GNclTXxyZrYGSw==",
- "dev": true,
- "requires": {
- "@babel/helper-create-regexp-features-plugin": "^7.16.7"
- }
- },
- "@babel/plugin-transform-new-target": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.16.7.tgz",
- "integrity": "sha512-xiLDzWNMfKoGOpc6t3U+etCE2yRnn3SM09BXqWPIZOBpL2gvVrBWUKnsJx0K/ADi5F5YC5f8APFfWrz25TdlGg==",
- "dev": true,
- "requires": {
- "@babel/helper-plugin-utils": "^7.16.7"
- }
- },
- "@babel/plugin-transform-object-super": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.16.7.tgz",
- "integrity": "sha512-14J1feiQVWaGvRxj2WjyMuXS2jsBkgB3MdSN5HuC2G5nRspa5RK9COcs82Pwy5BuGcjb+fYaUj94mYcOj7rCvw==",
- "dev": true,
- "requires": {
- "@babel/helper-plugin-utils": "^7.16.7",
- "@babel/helper-replace-supers": "^7.16.7"
- }
- },
- "@babel/plugin-transform-parameters": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.16.7.tgz",
- "integrity": "sha512-AT3MufQ7zZEhU2hwOA11axBnExW0Lszu4RL/tAlUJBuNoRak+wehQW8h6KcXOcgjY42fHtDxswuMhMjFEuv/aw==",
- "dev": true,
- "requires": {
- "@babel/helper-plugin-utils": "^7.16.7"
- }
- },
- "@babel/plugin-transform-property-literals": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.16.7.tgz",
- "integrity": "sha512-z4FGr9NMGdoIl1RqavCqGG+ZuYjfZ/hkCIeuH6Do7tXmSm0ls11nYVSJqFEUOSJbDab5wC6lRE/w6YjVcr6Hqw==",
- "dev": true,
- "requires": {
- "@babel/helper-plugin-utils": "^7.16.7"
- }
- },
- "@babel/plugin-transform-regenerator": {
- "version": "7.17.9",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.17.9.tgz",
- "integrity": "sha512-Lc2TfbxR1HOyn/c6b4Y/b6NHoTb67n/IoWLxTu4kC7h4KQnWlhCq2S8Tx0t2SVvv5Uu87Hs+6JEJ5kt2tYGylQ==",
- "dev": true,
- "requires": {
- "regenerator-transform": "^0.15.0"
- }
- },
- "@babel/plugin-transform-reserved-words": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.16.7.tgz",
- "integrity": "sha512-KQzzDnZ9hWQBjwi5lpY5v9shmm6IVG0U9pB18zvMu2i4H90xpT4gmqwPYsn8rObiadYe2M0gmgsiOIF5A/2rtg==",
- "dev": true,
- "requires": {
- "@babel/helper-plugin-utils": "^7.16.7"
- }
- },
- "@babel/plugin-transform-shorthand-properties": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.16.7.tgz",
- "integrity": "sha512-hah2+FEnoRoATdIb05IOXf+4GzXYTq75TVhIn1PewihbpyrNWUt2JbudKQOETWw6QpLe+AIUpJ5MVLYTQbeeUg==",
- "dev": true,
- "requires": {
- "@babel/helper-plugin-utils": "^7.16.7"
- }
- },
- "@babel/plugin-transform-spread": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-spread/-/plugin-transform-spread-7.16.7.tgz",
- "integrity": "sha512-+pjJpgAngb53L0iaA5gU/1MLXJIfXcYepLgXB3esVRf4fqmj8f2cxM3/FKaHsZms08hFQJkFccEWuIpm429TXg==",
- "dev": true,
- "requires": {
- "@babel/helper-plugin-utils": "^7.16.7",
- "@babel/helper-skip-transparent-expression-wrappers": "^7.16.0"
- }
- },
- "@babel/plugin-transform-sticky-regex": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.16.7.tgz",
- "integrity": "sha512-NJa0Bd/87QV5NZZzTuZG5BPJjLYadeSZ9fO6oOUoL4iQx+9EEuw/eEM92SrsT19Yc2jgB1u1hsjqDtH02c3Drw==",
- "dev": true,
- "requires": {
- "@babel/helper-plugin-utils": "^7.16.7"
- }
- },
- "@babel/plugin-transform-template-literals": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.16.7.tgz",
- "integrity": "sha512-VwbkDDUeenlIjmfNeDX/V0aWrQH2QiVyJtwymVQSzItFDTpxfyJh3EVaQiS0rIN/CqbLGr0VcGmuwyTdZtdIsA==",
- "dev": true,
- "requires": {
- "@babel/helper-plugin-utils": "^7.16.7"
- }
- },
- "@babel/plugin-transform-typeof-symbol": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.16.7.tgz",
- "integrity": "sha512-p2rOixCKRJzpg9JB4gjnG4gjWkWa89ZoYUnl9snJ1cWIcTH/hvxZqfO+WjG6T8DRBpctEol5jw1O5rA8gkCokQ==",
- "dev": true,
- "requires": {
- "@babel/helper-plugin-utils": "^7.16.7"
- }
- },
- "@babel/plugin-transform-unicode-escapes": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.16.7.tgz",
- "integrity": "sha512-TAV5IGahIz3yZ9/Hfv35TV2xEm+kaBDaZQCn2S/hG9/CZ0DktxJv9eKfPc7yYCvOYR4JGx1h8C+jcSOvgaaI/Q==",
- "dev": true,
- "requires": {
- "@babel/helper-plugin-utils": "^7.16.7"
- }
- },
- "@babel/plugin-transform-unicode-regex": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.16.7.tgz",
- "integrity": "sha512-oC5tYYKw56HO75KZVLQ+R/Nl3Hro9kf8iG0hXoaHP7tjAyCpvqBiSNe6vGrZni1Z6MggmUOC6A7VP7AVmw225Q==",
- "dev": true,
- "requires": {
- "@babel/helper-create-regexp-features-plugin": "^7.16.7",
- "@babel/helper-plugin-utils": "^7.16.7"
- }
- },
- "@babel/preset-env": {
- "version": "7.16.11",
- "resolved": "https://registry.npmjs.org/@babel/preset-env/-/preset-env-7.16.11.tgz",
- "integrity": "sha512-qcmWG8R7ZW6WBRPZK//y+E3Cli151B20W1Rv7ln27vuPaXU/8TKms6jFdiJtF7UDTxcrb7mZd88tAeK9LjdT8g==",
- "dev": true,
- "requires": {
- "@babel/compat-data": "^7.16.8",
- "@babel/helper-compilation-targets": "^7.16.7",
- "@babel/helper-plugin-utils": "^7.16.7",
- "@babel/helper-validator-option": "^7.16.7",
- "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": "^7.16.7",
- "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": "^7.16.7",
- "@babel/plugin-proposal-async-generator-functions": "^7.16.8",
- "@babel/plugin-proposal-class-properties": "^7.16.7",
- "@babel/plugin-proposal-class-static-block": "^7.16.7",
- "@babel/plugin-proposal-dynamic-import": "^7.16.7",
- "@babel/plugin-proposal-export-namespace-from": "^7.16.7",
- "@babel/plugin-proposal-json-strings": "^7.16.7",
- "@babel/plugin-proposal-logical-assignment-operators": "^7.16.7",
- "@babel/plugin-proposal-nullish-coalescing-operator": "^7.16.7",
- "@babel/plugin-proposal-numeric-separator": "^7.16.7",
- "@babel/plugin-proposal-object-rest-spread": "^7.16.7",
- "@babel/plugin-proposal-optional-catch-binding": "^7.16.7",
- "@babel/plugin-proposal-optional-chaining": "^7.16.7",
- "@babel/plugin-proposal-private-methods": "^7.16.11",
- "@babel/plugin-proposal-private-property-in-object": "^7.16.7",
- "@babel/plugin-proposal-unicode-property-regex": "^7.16.7",
- "@babel/plugin-syntax-async-generators": "^7.8.4",
- "@babel/plugin-syntax-class-properties": "^7.12.13",
- "@babel/plugin-syntax-class-static-block": "^7.14.5",
- "@babel/plugin-syntax-dynamic-import": "^7.8.3",
- "@babel/plugin-syntax-export-namespace-from": "^7.8.3",
- "@babel/plugin-syntax-json-strings": "^7.8.3",
- "@babel/plugin-syntax-logical-assignment-operators": "^7.10.4",
- "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3",
- "@babel/plugin-syntax-numeric-separator": "^7.10.4",
- "@babel/plugin-syntax-object-rest-spread": "^7.8.3",
- "@babel/plugin-syntax-optional-catch-binding": "^7.8.3",
- "@babel/plugin-syntax-optional-chaining": "^7.8.3",
- "@babel/plugin-syntax-private-property-in-object": "^7.14.5",
- "@babel/plugin-syntax-top-level-await": "^7.14.5",
- "@babel/plugin-transform-arrow-functions": "^7.16.7",
- "@babel/plugin-transform-async-to-generator": "^7.16.8",
- "@babel/plugin-transform-block-scoped-functions": "^7.16.7",
- "@babel/plugin-transform-block-scoping": "^7.16.7",
- "@babel/plugin-transform-classes": "^7.16.7",
- "@babel/plugin-transform-computed-properties": "^7.16.7",
- "@babel/plugin-transform-destructuring": "^7.16.7",
- "@babel/plugin-transform-dotall-regex": "^7.16.7",
- "@babel/plugin-transform-duplicate-keys": "^7.16.7",
- "@babel/plugin-transform-exponentiation-operator": "^7.16.7",
- "@babel/plugin-transform-for-of": "^7.16.7",
- "@babel/plugin-transform-function-name": "^7.16.7",
- "@babel/plugin-transform-literals": "^7.16.7",
- "@babel/plugin-transform-member-expression-literals": "^7.16.7",
- "@babel/plugin-transform-modules-amd": "^7.16.7",
- "@babel/plugin-transform-modules-commonjs": "^7.16.8",
- "@babel/plugin-transform-modules-systemjs": "^7.16.7",
- "@babel/plugin-transform-modules-umd": "^7.16.7",
- "@babel/plugin-transform-named-capturing-groups-regex": "^7.16.8",
- "@babel/plugin-transform-new-target": "^7.16.7",
- "@babel/plugin-transform-object-super": "^7.16.7",
- "@babel/plugin-transform-parameters": "^7.16.7",
- "@babel/plugin-transform-property-literals": "^7.16.7",
- "@babel/plugin-transform-regenerator": "^7.16.7",
- "@babel/plugin-transform-reserved-words": "^7.16.7",
- "@babel/plugin-transform-shorthand-properties": "^7.16.7",
- "@babel/plugin-transform-spread": "^7.16.7",
- "@babel/plugin-transform-sticky-regex": "^7.16.7",
- "@babel/plugin-transform-template-literals": "^7.16.7",
- "@babel/plugin-transform-typeof-symbol": "^7.16.7",
- "@babel/plugin-transform-unicode-escapes": "^7.16.7",
- "@babel/plugin-transform-unicode-regex": "^7.16.7",
- "@babel/preset-modules": "^0.1.5",
- "@babel/types": "^7.16.8",
- "babel-plugin-polyfill-corejs2": "^0.3.0",
- "babel-plugin-polyfill-corejs3": "^0.5.0",
- "babel-plugin-polyfill-regenerator": "^0.3.0",
- "core-js-compat": "^3.20.2",
- "semver": "^6.3.0"
- }
- },
- "@babel/preset-modules": {
- "version": "0.1.5",
- "resolved": "https://registry.npmjs.org/@babel/preset-modules/-/preset-modules-0.1.5.tgz",
- "integrity": "sha512-A57th6YRG7oR3cq/yt/Y84MvGgE0eJG2F1JLhKuyG+jFxEgrd/HAMJatiFtmOiZurz+0DkrvbheCLaV5f2JfjA==",
- "dev": true,
- "requires": {
- "@babel/helper-plugin-utils": "^7.0.0",
- "@babel/plugin-proposal-unicode-property-regex": "^7.4.4",
- "@babel/plugin-transform-dotall-regex": "^7.4.4",
- "@babel/types": "^7.4.4",
- "esutils": "^2.0.2"
- }
- },
- "@babel/runtime": {
- "version": "7.17.9",
- "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.17.9.tgz",
- "integrity": "sha512-lSiBBvodq29uShpWGNbgFdKYNiFDo5/HIYsaCEY9ff4sb10x9jizo2+pRrSyF4jKZCXqgzuqBOQKbUm90gQwJg==",
- "dev": true,
- "requires": {
- "regenerator-runtime": "^0.13.4"
- }
- },
- "@babel/template": {
- "version": "7.16.7",
- "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.16.7.tgz",
- "integrity": "sha512-I8j/x8kHUrbYRTUxXrrMbfCa7jxkE7tZre39x3kjr9hvI82cK1FfqLygotcWN5kdPGWcLdWMHpSBavse5tWw3w==",
- "dev": true,
- "requires": {
- "@babel/code-frame": "^7.16.7",
- "@babel/parser": "^7.16.7",
- "@babel/types": "^7.16.7"
- }
- },
- "@babel/traverse": {
- "version": "7.17.9",
- "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.17.9.tgz",
- "integrity": "sha512-PQO8sDIJ8SIwipTPiR71kJQCKQYB5NGImbOviK8K+kg5xkNSYXLBupuX9QhatFowrsvo9Hj8WgArg3W7ijNAQw==",
- "dev": true,
- "requires": {
- "@babel/code-frame": "^7.16.7",
- "@babel/generator": "^7.17.9",
- "@babel/helper-environment-visitor": "^7.16.7",
- "@babel/helper-function-name": "^7.17.9",
- "@babel/helper-hoist-variables": "^7.16.7",
- "@babel/helper-split-export-declaration": "^7.16.7",
- "@babel/parser": "^7.17.9",
- "@babel/types": "^7.17.0",
- "debug": "^4.1.0",
- "globals": "^11.1.0"
- }
- },
- "@babel/types": {
- "version": "7.17.0",
- "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.17.0.tgz",
- "integrity": "sha512-TmKSNO4D5rzhL5bjWFcVHHLETzfQ/AmbKpKPOSjlP0WoHZ6L911fgoOKY4Alp/emzG4cHJdyN49zpgkbXFEHHw==",
- "dev": true,
- "requires": {
- "@babel/helper-validator-identifier": "^7.16.7",
- "to-fast-properties": "^2.0.0"
- }
- },
- "@discoveryjs/json-ext": {
- "version": "0.5.7",
- "resolved": "https://registry.npmjs.org/@discoveryjs/json-ext/-/json-ext-0.5.7.tgz",
- "integrity": "sha512-dBVuXR082gk3jsFp7Rd/JI4kytwGHecnCoTtXFb7DB6CNHp4rg5k1bhg0nWdLGLnOV71lmDzGQaLMy8iPLY0pw==",
- "dev": true
- },
- "@jridgewell/gen-mapping": {
- "version": "0.1.0",
- "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.1.0.tgz",
- "integrity": "sha512-YH+BnkvuCiPR+MUOY6JIArdTIGrRtsxnLaIxPRy4CpGJ/V6OO6Gq/1J+FJEc4j5e5h6Bcy3/K7prlMrm93BJoA==",
- "dev": true,
- "requires": {
- "@jridgewell/set-array": "1.0.0",
- "@jridgewell/sourcemap-codec": "^1.4.10"
- }
- },
- "@jridgewell/resolve-uri": {
- "version": "3.0.6",
- "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.0.6.tgz",
- "integrity": "sha512-R7xHtBSNm+9SyvpJkdQl+qrM3Hm2fea3Ef197M3mUug+v+yR+Rhfbs7PBtcBUVnIWJ4JcAdjvij+c8hXS9p5aw==",
- "dev": true
- },
- "@jridgewell/set-array": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.0.0.tgz",
- "integrity": "sha512-LcqVnHCjOAj8BTCtjpwYZCMTn4yArusbdObCVRUYvBHhrR5fVLVyENG+UVWM4T4H/ufv7NiBLdprllxWs/5PaQ==",
- "dev": true
- },
- "@jridgewell/sourcemap-codec": {
- "version": "1.4.11",
- "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.11.tgz",
- "integrity": "sha512-Fg32GrJo61m+VqYSdRSjRXMjQ06j8YIYfcTqndLYVAaHmroZHLJZCydsWBOTDqXS2v+mjxohBWEMfg97GXmYQg==",
- "dev": true
- },
- "@jridgewell/trace-mapping": {
- "version": "0.3.9",
- "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz",
- "integrity": "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==",
- "dev": true,
- "requires": {
- "@jridgewell/resolve-uri": "^3.0.3",
- "@jridgewell/sourcemap-codec": "^1.4.10"
- }
- },
- "@rollup/plugin-node-resolve": {
- "version": "13.2.1",
- "resolved": "https://registry.npmjs.org/@rollup/plugin-node-resolve/-/plugin-node-resolve-13.2.1.tgz",
- "integrity": "sha512-btX7kzGvp1JwShQI9V6IM841YKNPYjKCvUbNrQ2EcVYbULtUd/GH6wZ/qdqH13j9pOHBER+EZXNN2L8RSJhVRA==",
- "dev": true,
- "requires": {
- "@rollup/pluginutils": "^3.1.0",
- "@types/resolve": "1.17.1",
- "builtin-modules": "^3.1.0",
- "deepmerge": "^4.2.2",
- "is-module": "^1.0.0",
- "resolve": "^1.19.0"
- },
- "dependencies": {
- "@types/resolve": {
- "version": "1.17.1",
- "resolved": "https://registry.npmjs.org/@types/resolve/-/resolve-1.17.1.tgz",
- "integrity": "sha512-yy7HuzQhj0dhGpD8RLXSZWEkLsV9ibvxvi6EiJ3bkqLAO1RGo0WbkWQiwpRlSFymTJRz0d3k5LM3kkx8ArDbLw==",
- "dev": true,
- "requires": {
- "@types/node": "*"
- }
- }
- }
- },
- "@rollup/pluginutils": {
- "version": "3.1.0",
- "resolved": "https://registry.npmjs.org/@rollup/pluginutils/-/pluginutils-3.1.0.tgz",
- "integrity": "sha512-GksZ6pr6TpIjHm8h9lSQ8pi8BE9VeubNT0OMJ3B5uZJ8pz73NPiqOtCog/x2/QzM1ENChPKxMDhiQuRHsqc+lg==",
- "dev": true,
- "requires": {
- "@types/estree": "0.0.39",
- "estree-walker": "^1.0.1",
- "picomatch": "^2.2.2"
- },
- "dependencies": {
- "@types/estree": {
- "version": "0.0.39",
- "resolved": "https://registry.npmjs.org/@types/estree/-/estree-0.0.39.tgz",
- "integrity": "sha512-EYNwp3bU+98cpU4lAWYYL7Zz+2gryWH1qbdDTidVd6hkiR6weksdbMadyXKXNPEkQFhXM+hVO9ZygomHXp+AIw==",
- "dev": true
- },
- "estree-walker": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-1.0.1.tgz",
- "integrity": "sha512-1fMXF3YP4pZZVozF8j/ZLfvnR8NSIljt56UhbZ5PeeDmmGHpgpdwQt7ITlGvYaQukCvuBRMLEiKiYC+oeIg4cg==",
- "dev": true
- }
- }
- },
- "@stomp/stompjs": {
- "version": "6.1.2",
- "resolved": "https://registry.npmjs.org/@stomp/stompjs/-/stompjs-6.1.2.tgz",
- "integrity": "sha512-FHDTrIFM5Ospi4L3Xhj6v2+NzCVAeNDcBe95YjUWhWiRMrBF6uN3I7AUOlRgT6jU/2WQvvYK8ZaIxFfxFp+uHQ=="
- },
- "@types/eslint": {
- "version": "8.4.1",
- "resolved": "https://registry.npmjs.org/@types/eslint/-/eslint-8.4.1.tgz",
- "integrity": "sha512-GE44+DNEyxxh2Kc6ro/VkIj+9ma0pO0bwv9+uHSyBrikYOHr8zYcdPvnBOp1aw8s+CjRvuSx7CyWqRrNFQ59mA==",
- "dev": true,
- "requires": {
- "@types/estree": "*",
- "@types/json-schema": "*"
- }
- },
- "@types/eslint-scope": {
- "version": "3.7.3",
- "resolved": "https://registry.npmjs.org/@types/eslint-scope/-/eslint-scope-3.7.3.tgz",
- "integrity": "sha512-PB3ldyrcnAicT35TWPs5IcwKD8S333HMaa2VVv4+wdvebJkjWuW/xESoB8IwRcog8HYVYamb1g/R31Qv5Bx03g==",
- "dev": true,
- "requires": {
- "@types/eslint": "*",
- "@types/estree": "*"
- }
- },
- "@types/estree": {
- "version": "0.0.51",
- "resolved": "https://registry.npmjs.org/@types/estree/-/estree-0.0.51.tgz",
- "integrity": "sha512-CuPgU6f3eT/XgKKPqKd/gLZV1Xmvf1a2R5POBOGQa6uv82xpls89HU5zKeVoyR8XzHd1RGNOlQlvUe3CFkjWNQ==",
- "dev": true
- },
- "@types/json-schema": {
- "version": "7.0.11",
- "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.11.tgz",
- "integrity": "sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ==",
- "dev": true
- },
- "@types/node": {
- "version": "17.0.29",
- "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.29.tgz",
- "integrity": "sha512-tx5jMmMFwx7wBwq/V7OohKDVb/JwJU5qCVkeLMh1//xycAJ/ESuw9aJ9SEtlCZDYi2pBfe4JkisSoAtbOsBNAA==",
- "dev": true
- },
- "@types/resolve": {
- "version": "0.0.8",
- "resolved": "https://registry.npmjs.org/@types/resolve/-/resolve-0.0.8.tgz",
- "integrity": "sha512-auApPaJf3NPfe18hSoJkp8EbZzer2ISk7o8mCC3M9he/a04+gbMF97NkpD2S8riMGvm4BMRI59/SZQSaLTKpsQ==",
- "dev": true,
- "requires": {
- "@types/node": "*"
- }
- },
- "@webassemblyjs/ast": {
- "version": "1.11.1",
- "resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.11.1.tgz",
- "integrity": "sha512-ukBh14qFLjxTQNTXocdyksN5QdM28S1CxHt2rdskFyL+xFV7VremuBLVbmCePj+URalXBENx/9Lm7lnhihtCSw==",
- "dev": true,
- "requires": {
- "@webassemblyjs/helper-numbers": "1.11.1",
- "@webassemblyjs/helper-wasm-bytecode": "1.11.1"
- }
- },
- "@webassemblyjs/floating-point-hex-parser": {
- "version": "1.11.1",
- "resolved": "https://registry.npmjs.org/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.11.1.tgz",
- "integrity": "sha512-iGRfyc5Bq+NnNuX8b5hwBrRjzf0ocrJPI6GWFodBFzmFnyvrQ83SHKhmilCU/8Jv67i4GJZBMhEzltxzcNagtQ==",
- "dev": true
- },
- "@webassemblyjs/helper-api-error": {
- "version": "1.11.1",
- "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.1.tgz",
- "integrity": "sha512-RlhS8CBCXfRUR/cwo2ho9bkheSXG0+NwooXcc3PAILALf2QLdFyj7KGsKRbVc95hZnhnERon4kW/D3SZpp6Tcg==",
- "dev": true
- },
- "@webassemblyjs/helper-buffer": {
- "version": "1.11.1",
- "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-buffer/-/helper-buffer-1.11.1.tgz",
- "integrity": "sha512-gwikF65aDNeeXa8JxXa2BAk+REjSyhrNC9ZwdT0f8jc4dQQeDQ7G4m0f2QCLPJiMTTO6wfDmRmj/pW0PsUvIcA==",
- "dev": true
- },
- "@webassemblyjs/helper-numbers": {
- "version": "1.11.1",
- "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-numbers/-/helper-numbers-1.11.1.tgz",
- "integrity": "sha512-vDkbxiB8zfnPdNK9Rajcey5C0w+QJugEglN0of+kmO8l7lDb77AnlKYQF7aarZuCrv+l0UvqL+68gSDr3k9LPQ==",
- "dev": true,
- "requires": {
- "@webassemblyjs/floating-point-hex-parser": "1.11.1",
- "@webassemblyjs/helper-api-error": "1.11.1",
- "@xtuc/long": "4.2.2"
- }
- },
- "@webassemblyjs/helper-wasm-bytecode": {
- "version": "1.11.1",
- "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.1.tgz",
- "integrity": "sha512-PvpoOGiJwXeTrSf/qfudJhwlvDQxFgelbMqtq52WWiXC6Xgg1IREdngmPN3bs4RoO83PnL/nFrxucXj1+BX62Q==",
- "dev": true
- },
- "@webassemblyjs/helper-wasm-section": {
- "version": "1.11.1",
- "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.11.1.tgz",
- "integrity": "sha512-10P9No29rYX1j7F3EVPX3JvGPQPae+AomuSTPiF9eBQeChHI6iqjMIwR9JmOJXwpnn/oVGDk7I5IlskuMwU/pg==",
- "dev": true,
- "requires": {
- "@webassemblyjs/ast": "1.11.1",
- "@webassemblyjs/helper-buffer": "1.11.1",
- "@webassemblyjs/helper-wasm-bytecode": "1.11.1",
- "@webassemblyjs/wasm-gen": "1.11.1"
- }
- },
- "@webassemblyjs/ieee754": {
- "version": "1.11.1",
- "resolved": "https://registry.npmjs.org/@webassemblyjs/ieee754/-/ieee754-1.11.1.tgz",
- "integrity": "sha512-hJ87QIPtAMKbFq6CGTkZYJivEwZDbQUgYd3qKSadTNOhVY7p+gfP6Sr0lLRVTaG1JjFj+r3YchoqRYxNH3M0GQ==",
- "dev": true,
- "requires": {
- "@xtuc/ieee754": "^1.2.0"
- }
- },
- "@webassemblyjs/leb128": {
- "version": "1.11.1",
- "resolved": "https://registry.npmjs.org/@webassemblyjs/leb128/-/leb128-1.11.1.tgz",
- "integrity": "sha512-BJ2P0hNZ0u+Th1YZXJpzW6miwqQUGcIHT1G/sf72gLVD9DZ5AdYTqPNbHZh6K1M5VmKvFXwGSWZADz+qBWxeRw==",
- "dev": true,
- "requires": {
- "@xtuc/long": "4.2.2"
- }
- },
- "@webassemblyjs/utf8": {
- "version": "1.11.1",
- "resolved": "https://registry.npmjs.org/@webassemblyjs/utf8/-/utf8-1.11.1.tgz",
- "integrity": "sha512-9kqcxAEdMhiwQkHpkNiorZzqpGrodQQ2IGrHHxCy+Ozng0ofyMA0lTqiLkVs1uzTRejX+/O0EOT7KxqVPuXosQ==",
- "dev": true
- },
- "@webassemblyjs/wasm-edit": {
- "version": "1.11.1",
- "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-edit/-/wasm-edit-1.11.1.tgz",
- "integrity": "sha512-g+RsupUC1aTHfR8CDgnsVRVZFJqdkFHpsHMfJuWQzWU3tvnLC07UqHICfP+4XyL2tnr1amvl1Sdp06TnYCmVkA==",
- "dev": true,
- "requires": {
- "@webassemblyjs/ast": "1.11.1",
- "@webassemblyjs/helper-buffer": "1.11.1",
- "@webassemblyjs/helper-wasm-bytecode": "1.11.1",
- "@webassemblyjs/helper-wasm-section": "1.11.1",
- "@webassemblyjs/wasm-gen": "1.11.1",
- "@webassemblyjs/wasm-opt": "1.11.1",
- "@webassemblyjs/wasm-parser": "1.11.1",
- "@webassemblyjs/wast-printer": "1.11.1"
- }
- },
- "@webassemblyjs/wasm-gen": {
- "version": "1.11.1",
- "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-gen/-/wasm-gen-1.11.1.tgz",
- "integrity": "sha512-F7QqKXwwNlMmsulj6+O7r4mmtAlCWfO/0HdgOxSklZfQcDu0TpLiD1mRt/zF25Bk59FIjEuGAIyn5ei4yMfLhA==",
- "dev": true,
- "requires": {
- "@webassemblyjs/ast": "1.11.1",
- "@webassemblyjs/helper-wasm-bytecode": "1.11.1",
- "@webassemblyjs/ieee754": "1.11.1",
- "@webassemblyjs/leb128": "1.11.1",
- "@webassemblyjs/utf8": "1.11.1"
- }
- },
- "@webassemblyjs/wasm-opt": {
- "version": "1.11.1",
- "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-opt/-/wasm-opt-1.11.1.tgz",
- "integrity": "sha512-VqnkNqnZlU5EB64pp1l7hdm3hmQw7Vgqa0KF/KCNO9sIpI6Fk6brDEiX+iCOYrvMuBWDws0NkTOxYEb85XQHHw==",
- "dev": true,
- "requires": {
- "@webassemblyjs/ast": "1.11.1",
- "@webassemblyjs/helper-buffer": "1.11.1",
- "@webassemblyjs/wasm-gen": "1.11.1",
- "@webassemblyjs/wasm-parser": "1.11.1"
- }
- },
- "@webassemblyjs/wasm-parser": {
- "version": "1.11.1",
- "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-parser/-/wasm-parser-1.11.1.tgz",
- "integrity": "sha512-rrBujw+dJu32gYB7/Lup6UhdkPx9S9SnobZzRVL7VcBH9Bt9bCBLEuX/YXOOtBsOZ4NQrRykKhffRWHvigQvOA==",
- "dev": true,
- "requires": {
- "@webassemblyjs/ast": "1.11.1",
- "@webassemblyjs/helper-api-error": "1.11.1",
- "@webassemblyjs/helper-wasm-bytecode": "1.11.1",
- "@webassemblyjs/ieee754": "1.11.1",
- "@webassemblyjs/leb128": "1.11.1",
- "@webassemblyjs/utf8": "1.11.1"
- }
- },
- "@webassemblyjs/wast-printer": {
- "version": "1.11.1",
- "resolved": "https://registry.npmjs.org/@webassemblyjs/wast-printer/-/wast-printer-1.11.1.tgz",
- "integrity": "sha512-IQboUWM4eKzWW+N/jij2sRatKMh99QEelo3Eb2q0qXkvPRISAj8Qxtmw5itwqK+TTkBuUIE45AxYPToqPtL5gg==",
- "dev": true,
- "requires": {
- "@webassemblyjs/ast": "1.11.1",
- "@xtuc/long": "4.2.2"
- }
- },
- "@webpack-cli/configtest": {
- "version": "1.2.0",
- "resolved": "https://registry.npmjs.org/@webpack-cli/configtest/-/configtest-1.2.0.tgz",
- "integrity": "sha512-4FB8Tj6xyVkyqjj1OaTqCjXYULB9FMkqQ8yGrZjRDrYh0nOE+7Lhs45WioWQQMV+ceFlE368Ukhe6xdvJM9Egg==",
- "dev": true,
- "requires": {}
- },
- "@webpack-cli/info": {
- "version": "1.5.0",
- "resolved": "https://registry.npmjs.org/@webpack-cli/info/-/info-1.5.0.tgz",
- "integrity": "sha512-e8tSXZpw2hPl2uMJY6fsMswaok5FdlGNRTktvFk2sD8RjH0hE2+XistawJx1vmKteh4NmGmNUrp+Tb2w+udPcQ==",
- "dev": true,
- "requires": {
- "envinfo": "^7.7.3"
- }
- },
- "@webpack-cli/serve": {
- "version": "1.7.0",
- "resolved": "https://registry.npmjs.org/@webpack-cli/serve/-/serve-1.7.0.tgz",
- "integrity": "sha512-oxnCNGj88fL+xzV+dacXs44HcDwf1ovs3AuEzvP7mqXw7fQntqIhQ1BRmynh4qEKQSSSRSWVyXRjmTbZIX9V2Q==",
- "dev": true,
- "requires": {}
- },
- "@xtuc/ieee754": {
- "version": "1.2.0",
- "resolved": "https://registry.npmjs.org/@xtuc/ieee754/-/ieee754-1.2.0.tgz",
- "integrity": "sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA==",
- "dev": true
- },
- "@xtuc/long": {
- "version": "4.2.2",
- "resolved": "https://registry.npmjs.org/@xtuc/long/-/long-4.2.2.tgz",
- "integrity": "sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ==",
- "dev": true
- },
- "acorn": {
- "version": "8.7.1",
- "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.7.1.tgz",
- "integrity": "sha512-Xx54uLJQZ19lKygFXOWsscKUbsBZW0CPykPhVQdhIeIwrbPmJzqeASDInc8nKBnp/JT6igTs82qPXz069H8I/A==",
- "dev": true
- },
- "acorn-import-assertions": {
- "version": "1.8.0",
- "resolved": "https://registry.npmjs.org/acorn-import-assertions/-/acorn-import-assertions-1.8.0.tgz",
- "integrity": "sha512-m7VZ3jwz4eK6A4Vtt8Ew1/mNbP24u0FhdyfA7fSvnJR6LMdfOYnmuIrrJAgrYfYJ10F/otaHTtrtrtmHdMNzEw==",
- "dev": true,
- "requires": {}
- },
- "ajv": {
- "version": "6.12.6",
- "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz",
- "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==",
- "dev": true,
- "requires": {
- "fast-deep-equal": "^3.1.1",
- "fast-json-stable-stringify": "^2.0.0",
- "json-schema-traverse": "^0.4.1",
- "uri-js": "^4.2.2"
- }
- },
- "ajv-keywords": {
- "version": "3.5.2",
- "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz",
- "integrity": "sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==",
- "dev": true,
- "requires": {}
- },
- "ansi-styles": {
- "version": "3.2.1",
- "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz",
- "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==",
- "dev": true,
- "requires": {
- "color-convert": "^1.9.0"
- }
- },
- "asn1.js": {
- "version": "5.4.1",
- "resolved": "https://registry.npmjs.org/asn1.js/-/asn1.js-5.4.1.tgz",
- "integrity": "sha512-+I//4cYPccV8LdmBLiX8CYvf9Sp3vQsrqu2QNXRcrbiWvcx/UdlFiqUJJzxRQxgsZmvhXhn4cSKeSmoFjVdupA==",
- "requires": {
- "bn.js": "^4.0.0",
- "inherits": "^2.0.1",
- "minimalistic-assert": "^1.0.0",
- "safer-buffer": "^2.1.0"
- },
- "dependencies": {
- "bn.js": {
- "version": "4.12.0",
- "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz",
- "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA=="
- }
- }
- },
- "babel-plugin-dynamic-import-node": {
- "version": "2.3.3",
- "resolved": "https://registry.npmjs.org/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.3.tgz",
- "integrity": "sha512-jZVI+s9Zg3IqA/kdi0i6UDCybUI3aSBLnglhYbSSjKlV7yF1F/5LWv8MakQmvYpnbJDS6fcBL2KzHSxNCMtWSQ==",
- "dev": true,
- "requires": {
- "object.assign": "^4.1.0"
- }
- },
- "babel-plugin-polyfill-corejs2": {
- "version": "0.3.1",
- "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.3.1.tgz",
- "integrity": "sha512-v7/T6EQcNfVLfcN2X8Lulb7DjprieyLWJK/zOWH5DUYcAgex9sP3h25Q+DLsX9TloXe3y1O8l2q2Jv9q8UVB9w==",
- "dev": true,
- "requires": {
- "@babel/compat-data": "^7.13.11",
- "@babel/helper-define-polyfill-provider": "^0.3.1",
- "semver": "^6.1.1"
- }
- },
- "babel-plugin-polyfill-corejs3": {
- "version": "0.5.2",
- "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.5.2.tgz",
- "integrity": "sha512-G3uJih0XWiID451fpeFaYGVuxHEjzKTHtc9uGFEjR6hHrvNzeS/PX+LLLcetJcytsB5m4j+K3o/EpXJNb/5IEQ==",
- "dev": true,
- "requires": {
- "@babel/helper-define-polyfill-provider": "^0.3.1",
- "core-js-compat": "^3.21.0"
- }
- },
- "babel-plugin-polyfill-regenerator": {
- "version": "0.3.1",
- "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.3.1.tgz",
- "integrity": "sha512-Y2B06tvgHYt1x0yz17jGkGeeMr5FeKUu+ASJ+N6nB5lQ8Dapfg42i0OVrf8PNGJ3zKL4A23snMi1IRwrqqND7A==",
- "dev": true,
- "requires": {
- "@babel/helper-define-polyfill-provider": "^0.3.1"
- }
- },
- "bn.js": {
- "version": "5.2.1",
- "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-5.2.1.tgz",
- "integrity": "sha512-eXRvHzWyYPBuB4NBy0cmYQjGitUrtqwbvlzP3G6VFnNRbsZQIxQ10PbKKHt8gZ/HW/D/747aDl+QkDqg3KQLMQ=="
- },
- "brorand": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/brorand/-/brorand-1.1.0.tgz",
- "integrity": "sha512-cKV8tMCEpQs4hK/ik71d6LrPOnpkpGBR0wzxqr68g2m/LB2GxVYQroAjMJZRVM1Y4BCjCKc3vAamxSzOY2RP+w=="
- },
- "browserify-aes": {
- "version": "1.2.0",
- "resolved": "https://registry.npmjs.org/browserify-aes/-/browserify-aes-1.2.0.tgz",
- "integrity": "sha512-+7CHXqGuspUn/Sl5aO7Ea0xWGAtETPXNSAjHo48JfLdPWcMng33Xe4znFvQweqc/uzk5zSOI3H52CYnjCfb5hA==",
- "requires": {
- "buffer-xor": "^1.0.3",
- "cipher-base": "^1.0.0",
- "create-hash": "^1.1.0",
- "evp_bytestokey": "^1.0.3",
- "inherits": "^2.0.1",
- "safe-buffer": "^5.0.1"
- }
- },
- "browserify-cipher": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/browserify-cipher/-/browserify-cipher-1.0.1.tgz",
- "integrity": "sha512-sPhkz0ARKbf4rRQt2hTpAHqn47X3llLkUGn+xEJzLjwY8LRs2p0v7ljvI5EyoRO/mexrNunNECisZs+gw2zz1w==",
- "requires": {
- "browserify-aes": "^1.0.4",
- "browserify-des": "^1.0.0",
- "evp_bytestokey": "^1.0.0"
- }
- },
- "browserify-des": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/browserify-des/-/browserify-des-1.0.2.tgz",
- "integrity": "sha512-BioO1xf3hFwz4kc6iBhI3ieDFompMhrMlnDFC4/0/vd5MokpuAc3R+LYbwTA9A5Yc9pq9UYPqffKpW2ObuwX5A==",
- "requires": {
- "cipher-base": "^1.0.1",
- "des.js": "^1.0.0",
- "inherits": "^2.0.1",
- "safe-buffer": "^5.1.2"
- }
- },
- "browserify-rsa": {
- "version": "4.1.0",
- "resolved": "https://registry.npmjs.org/browserify-rsa/-/browserify-rsa-4.1.0.tgz",
- "integrity": "sha512-AdEER0Hkspgno2aR97SAf6vi0y0k8NuOpGnVH3O99rcA5Q6sh8QxcngtHuJ6uXwnfAXNM4Gn1Gb7/MV1+Ymbog==",
- "requires": {
- "bn.js": "^5.0.0",
- "randombytes": "^2.0.1"
- }
- },
- "browserify-sign": {
- "version": "4.2.1",
- "resolved": "https://registry.npmjs.org/browserify-sign/-/browserify-sign-4.2.1.tgz",
- "integrity": "sha512-/vrA5fguVAKKAVTNJjgSm1tRQDHUU6DbwO9IROu/0WAzC8PKhucDSh18J0RMvVeHAn5puMd+QHC2erPRNf8lmg==",
- "requires": {
- "bn.js": "^5.1.1",
- "browserify-rsa": "^4.0.1",
- "create-hash": "^1.2.0",
- "create-hmac": "^1.1.7",
- "elliptic": "^6.5.3",
- "inherits": "^2.0.4",
- "parse-asn1": "^5.1.5",
- "readable-stream": "^3.6.0",
- "safe-buffer": "^5.2.0"
- },
- "dependencies": {
- "safe-buffer": {
- "version": "5.2.1",
- "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
- "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ=="
- }
- }
- },
- "browserslist": {
- "version": "4.20.3",
- "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.20.3.tgz",
- "integrity": "sha512-NBhymBQl1zM0Y5dQT/O+xiLP9/rzOIQdKM/eMJBAq7yBgaB6krIYLGejrwVYnSHZdqjscB1SPuAjHwxjvN6Wdg==",
- "dev": true,
- "requires": {
- "caniuse-lite": "^1.0.30001332",
- "electron-to-chromium": "^1.4.118",
- "escalade": "^3.1.1",
- "node-releases": "^2.0.3",
- "picocolors": "^1.0.0"
- }
- },
- "buffer-from": {
- "version": "1.1.2",
- "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz",
- "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==",
- "dev": true
- },
- "buffer-xor": {
- "version": "1.0.3",
- "resolved": "https://registry.npmjs.org/buffer-xor/-/buffer-xor-1.0.3.tgz",
- "integrity": "sha512-571s0T7nZWK6vB67HI5dyUF7wXiNcfaPPPTl6zYCNApANjIvYJTg7hlud/+cJpdAhS7dVzqMLmfhfHR3rAcOjQ=="
- },
- "bufferutil": {
- "version": "4.0.6",
- "resolved": "https://registry.npmjs.org/bufferutil/-/bufferutil-4.0.6.tgz",
- "integrity": "sha512-jduaYOYtnio4aIAyc6UbvPCVcgq7nYpVnucyxr6eCYg/Woad9Hf/oxxBRDnGGjPfjUm6j5O/uBWhIu4iLebFaw==",
- "requires": {
- "node-gyp-build": "^4.3.0"
- }
- },
- "builtin-modules": {
- "version": "3.2.0",
- "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.2.0.tgz",
- "integrity": "sha512-lGzLKcioL90C7wMczpkY0n/oART3MbBa8R9OFGE1rJxoVI86u4WAGfEk8Wjv10eKSyTHVGkSo3bvBylCEtk7LA==",
- "dev": true
- },
- "call-bind": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz",
- "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==",
- "dev": true,
- "requires": {
- "function-bind": "^1.1.1",
- "get-intrinsic": "^1.0.2"
- }
- },
- "caniuse-lite": {
- "version": "1.0.30001332",
- "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001332.tgz",
- "integrity": "sha512-10T30NYOEQtN6C11YGg411yebhvpnC6Z102+B95eAsN0oB6KUs01ivE8u+G6FMIRtIrVlYXhL+LUwQ3/hXwDWw==",
- "dev": true
- },
- "chalk": {
- "version": "2.4.2",
- "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz",
- "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==",
- "dev": true,
- "requires": {
- "ansi-styles": "^3.2.1",
- "escape-string-regexp": "^1.0.5",
- "supports-color": "^5.3.0"
- }
- },
- "chrome-trace-event": {
- "version": "1.0.3",
- "resolved": "https://registry.npmjs.org/chrome-trace-event/-/chrome-trace-event-1.0.3.tgz",
- "integrity": "sha512-p3KULyQg4S7NIHixdwbGX+nFHkoBiA4YQmyWtjb8XngSKV124nJmRysgAeujbUVb15vh+RvFUfCPqU7rXk+hZg==",
- "dev": true
- },
- "cipher-base": {
- "version": "1.0.4",
- "resolved": "https://registry.npmjs.org/cipher-base/-/cipher-base-1.0.4.tgz",
- "integrity": "sha512-Kkht5ye6ZGmwv40uUDZztayT2ThLQGfnj/T71N/XzeZeo3nf8foyW7zGTsPYkEya3m5f3cAypH+qe7YOrM1U2Q==",
- "requires": {
- "inherits": "^2.0.1",
- "safe-buffer": "^5.0.1"
- }
- },
- "clone-deep": {
- "version": "4.0.1",
- "resolved": "https://registry.npmjs.org/clone-deep/-/clone-deep-4.0.1.tgz",
- "integrity": "sha512-neHB9xuzh/wk0dIHweyAXv2aPGZIVk3pLMe+/RNzINf17fe0OG96QroktYAUm7SM1PBnzTabaLboqqxDyMU+SQ==",
- "dev": true,
- "requires": {
- "is-plain-object": "^2.0.4",
- "kind-of": "^6.0.2",
- "shallow-clone": "^3.0.0"
- }
- },
- "color-convert": {
- "version": "1.9.3",
- "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz",
- "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==",
- "dev": true,
- "requires": {
- "color-name": "1.1.3"
- }
- },
- "color-name": {
- "version": "1.1.3",
- "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz",
- "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=",
- "dev": true
- },
- "colorette": {
- "version": "2.0.16",
- "resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.16.tgz",
- "integrity": "sha512-hUewv7oMjCp+wkBv5Rm0v87eJhq4woh5rSR+42YSQJKecCqgIqNkZ6lAlQms/BwHPJA5NKMRlpxPRv0n8HQW6g==",
- "dev": true
- },
- "convert-source-map": {
- "version": "1.8.0",
- "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.8.0.tgz",
- "integrity": "sha512-+OQdjP49zViI/6i7nIJpA8rAl4sV/JdPfU9nZs3VqOwGIgizICvuN2ru6fMd+4llL0tar18UYJXfZ/TWtmhUjA==",
- "dev": true,
- "requires": {
- "safe-buffer": "~5.1.1"
- }
- },
- "core-js-compat": {
- "version": "3.22.2",
- "resolved": "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.22.2.tgz",
- "integrity": "sha512-Fns9lU06ZJ07pdfmPMu7OnkIKGPKDzXKIiuGlSvHHapwqMUF2QnnsWwtueFZtSyZEilP0o6iUeHQwpn7LxtLUw==",
- "dev": true,
- "requires": {
- "browserslist": "^4.20.2",
- "semver": "7.0.0"
- },
- "dependencies": {
- "semver": {
- "version": "7.0.0",
- "resolved": "https://registry.npmjs.org/semver/-/semver-7.0.0.tgz",
- "integrity": "sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A==",
- "dev": true
- }
- }
- },
- "create-ecdh": {
- "version": "4.0.4",
- "resolved": "https://registry.npmjs.org/create-ecdh/-/create-ecdh-4.0.4.tgz",
- "integrity": "sha512-mf+TCx8wWc9VpuxfP2ht0iSISLZnt0JgWlrOKZiNqyUZWnjIaCIVNQArMHnCZKfEYRg6IM7A+NeJoN8gf/Ws0A==",
- "requires": {
- "bn.js": "^4.1.0",
- "elliptic": "^6.5.3"
- },
- "dependencies": {
- "bn.js": {
- "version": "4.12.0",
- "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz",
- "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA=="
- }
- }
- },
- "create-hash": {
- "version": "1.2.0",
- "resolved": "https://registry.npmjs.org/create-hash/-/create-hash-1.2.0.tgz",
- "integrity": "sha512-z00bCGNHDG8mHAkP7CtT1qVu+bFQUPjYq/4Iv3C3kWjTFV10zIjfSoeqXo9Asws8gwSHDGj/hl2u4OGIjapeCg==",
- "requires": {
- "cipher-base": "^1.0.1",
- "inherits": "^2.0.1",
- "md5.js": "^1.3.4",
- "ripemd160": "^2.0.1",
- "sha.js": "^2.4.0"
- }
- },
- "create-hmac": {
- "version": "1.1.7",
- "resolved": "https://registry.npmjs.org/create-hmac/-/create-hmac-1.1.7.tgz",
- "integrity": "sha512-MJG9liiZ+ogc4TzUwuvbER1JRdgvUFSB5+VR/g5h82fGaIRWMWddtKBHi7/sVhfjQZ6SehlyhvQYrcYkaUIpLg==",
- "requires": {
- "cipher-base": "^1.0.3",
- "create-hash": "^1.1.0",
- "inherits": "^2.0.1",
- "ripemd160": "^2.0.0",
- "safe-buffer": "^5.0.1",
- "sha.js": "^2.4.8"
- }
- },
- "cross-spawn": {
- "version": "7.0.3",
- "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz",
- "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==",
- "dev": true,
- "requires": {
- "path-key": "^3.1.0",
- "shebang-command": "^2.0.0",
- "which": "^2.0.1"
- }
- },
- "crypto-browserify": {
- "version": "3.12.0",
- "resolved": "https://registry.npmjs.org/crypto-browserify/-/crypto-browserify-3.12.0.tgz",
- "integrity": "sha512-fz4spIh+znjO2VjL+IdhEpRJ3YN6sMzITSBijk6FK2UvTqruSQW+/cCZTSNsMiZNvUeq0CqurF+dAbyiGOY6Wg==",
- "requires": {
- "browserify-cipher": "^1.0.0",
- "browserify-sign": "^4.0.0",
- "create-ecdh": "^4.0.0",
- "create-hash": "^1.1.0",
- "create-hmac": "^1.1.0",
- "diffie-hellman": "^5.0.0",
- "inherits": "^2.0.1",
- "pbkdf2": "^3.0.3",
- "public-encrypt": "^4.0.0",
- "randombytes": "^2.0.0",
- "randomfill": "^1.0.3"
- }
- },
- "d": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/d/-/d-1.0.1.tgz",
- "integrity": "sha512-m62ShEObQ39CfralilEQRjH6oAMtNCV1xJyEx5LpRYUVN+EviphDgUc/F3hnYbADmkiNs67Y+3ylmlG7Lnu+FA==",
- "requires": {
- "es5-ext": "^0.10.50",
- "type": "^1.0.1"
- }
- },
- "debug": {
- "version": "4.3.4",
- "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz",
- "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==",
- "dev": true,
- "requires": {
- "ms": "2.1.2"
- }
- },
- "deepmerge": {
- "version": "4.2.2",
- "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.2.2.tgz",
- "integrity": "sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg==",
- "dev": true
- },
- "define-properties": {
- "version": "1.1.4",
- "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.4.tgz",
- "integrity": "sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA==",
- "dev": true,
- "requires": {
- "has-property-descriptors": "^1.0.0",
- "object-keys": "^1.1.1"
- }
- },
- "des.js": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/des.js/-/des.js-1.0.1.tgz",
- "integrity": "sha512-Q0I4pfFrv2VPd34/vfLrFOoRmlYj3OV50i7fskps1jZWK1kApMWWT9G6RRUeYedLcBDIhnSDaUvJMb3AhUlaEA==",
- "requires": {
- "inherits": "^2.0.1",
- "minimalistic-assert": "^1.0.0"
- }
- },
- "diffie-hellman": {
- "version": "5.0.3",
- "resolved": "https://registry.npmjs.org/diffie-hellman/-/diffie-hellman-5.0.3.tgz",
- "integrity": "sha512-kqag/Nl+f3GwyK25fhUMYj81BUOrZ9IuJsjIcDE5icNM9FJHAVm3VcUDxdLPoQtTuUylWm6ZIknYJwwaPxsUzg==",
- "requires": {
- "bn.js": "^4.1.0",
- "miller-rabin": "^4.0.0",
- "randombytes": "^2.0.0"
- },
- "dependencies": {
- "bn.js": {
- "version": "4.12.0",
- "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz",
- "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA=="
- }
- }
- },
- "electron-to-chromium": {
- "version": "1.4.123",
- "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.123.tgz",
- "integrity": "sha512-0pHGE53WkYoFbsgwYcVKEpWa6jbzlvkohIEA2CUoZ9b5KC+w/zlMiQHvW/4IBcOh7YoEFqRNavgTk02TBoUTUw==",
- "dev": true
- },
- "elliptic": {
- "version": "6.5.4",
- "resolved": "https://registry.npmjs.org/elliptic/-/elliptic-6.5.4.tgz",
- "integrity": "sha512-iLhC6ULemrljPZb+QutR5TQGB+pdW6KGD5RSegS+8sorOZT+rdQFbsQFJgvN3eRqNALqJer4oQ16YvJHlU8hzQ==",
- "requires": {
- "bn.js": "^4.11.9",
- "brorand": "^1.1.0",
- "hash.js": "^1.0.0",
- "hmac-drbg": "^1.0.1",
- "inherits": "^2.0.4",
- "minimalistic-assert": "^1.0.1",
- "minimalistic-crypto-utils": "^1.0.1"
- },
- "dependencies": {
- "bn.js": {
- "version": "4.12.0",
- "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz",
- "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA=="
- }
- }
- },
- "enhanced-resolve": {
- "version": "5.9.3",
- "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.9.3.tgz",
- "integrity": "sha512-Bq9VSor+kjvW3f9/MiiR4eE3XYgOl7/rS8lnSxbRbF3kS0B2r+Y9w5krBWxZgDxASVZbdYrn5wT4j/Wb0J9qow==",
- "dev": true,
- "requires": {
- "graceful-fs": "^4.2.4",
- "tapable": "^2.2.0"
- }
- },
- "envinfo": {
- "version": "7.8.1",
- "resolved": "https://registry.npmjs.org/envinfo/-/envinfo-7.8.1.tgz",
- "integrity": "sha512-/o+BXHmB7ocbHEAs6F2EnG0ogybVVUdkRunTT2glZU9XAaGmhqskrvKwqXuDfNjEO0LZKWdejEEpnq8aM0tOaw==",
- "dev": true
- },
- "es-module-lexer": {
- "version": "0.9.3",
- "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-0.9.3.tgz",
- "integrity": "sha512-1HQ2M2sPtxwnvOvT1ZClHyQDiggdNjURWpY2we6aMKCQiUVxTmVs2UYPLIrD84sS+kMdUwfBSylbJPwNnBrnHQ==",
- "dev": true
- },
- "es5-ext": {
- "version": "0.10.61",
- "resolved": "https://registry.npmjs.org/es5-ext/-/es5-ext-0.10.61.tgz",
- "integrity": "sha512-yFhIqQAzu2Ca2I4SE2Au3rxVfmohU9Y7wqGR+s7+H7krk26NXhIRAZDgqd6xqjCEFUomDEA3/Bo/7fKmIkW1kA==",
- "requires": {
- "es6-iterator": "^2.0.3",
- "es6-symbol": "^3.1.3",
- "next-tick": "^1.1.0"
- }
- },
- "es6-iterator": {
- "version": "2.0.3",
- "resolved": "https://registry.npmjs.org/es6-iterator/-/es6-iterator-2.0.3.tgz",
- "integrity": "sha1-p96IkUGgWpSwhUQDstCg+/qY87c=",
- "requires": {
- "d": "1",
- "es5-ext": "^0.10.35",
- "es6-symbol": "^3.1.1"
- }
- },
- "es6-symbol": {
- "version": "3.1.3",
- "resolved": "https://registry.npmjs.org/es6-symbol/-/es6-symbol-3.1.3.tgz",
- "integrity": "sha512-NJ6Yn3FuDinBaBRWl/q5X/s4koRHBrgKAu+yGI6JCBeiu3qrcbJhwT2GeR/EXVfylRk8dpQVJoLEFhK+Mu31NA==",
- "requires": {
- "d": "^1.0.1",
- "ext": "^1.1.2"
- }
- },
- "escalade": {
- "version": "3.1.1",
- "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz",
- "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==",
- "dev": true
- },
- "escape-string-regexp": {
- "version": "1.0.5",
- "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz",
- "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=",
- "dev": true
- },
- "eslint-scope": {
- "version": "5.1.1",
- "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz",
- "integrity": "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==",
- "dev": true,
- "requires": {
- "esrecurse": "^4.3.0",
- "estraverse": "^4.1.1"
- }
- },
- "esrecurse": {
- "version": "4.3.0",
- "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz",
- "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==",
- "dev": true,
- "requires": {
- "estraverse": "^5.2.0"
- },
- "dependencies": {
- "estraverse": {
- "version": "5.3.0",
- "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz",
- "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==",
- "dev": true
- }
- }
- },
- "estraverse": {
- "version": "4.3.0",
- "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz",
- "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==",
- "dev": true
- },
- "estree-walker": {
- "version": "0.6.1",
- "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-0.6.1.tgz",
- "integrity": "sha512-SqmZANLWS0mnatqbSfRP5g8OXZC12Fgg1IwNtLsyHDzJizORW4khDfjPqJZsemPWBB2uqykUah5YpQ6epsqC/w==",
- "dev": true
- },
- "esutils": {
- "version": "2.0.3",
- "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz",
- "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==",
- "dev": true
- },
- "events": {
- "version": "3.3.0",
- "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz",
- "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==",
- "dev": true
- },
- "evp_bytestokey": {
- "version": "1.0.3",
- "resolved": "https://registry.npmjs.org/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz",
- "integrity": "sha512-/f2Go4TognH/KvCISP7OUsHn85hT9nUkxxA9BEWxFn+Oj9o8ZNLm/40hdlgSLyuOimsrTKLUMEorQexp/aPQeA==",
- "requires": {
- "md5.js": "^1.3.4",
- "safe-buffer": "^5.1.1"
- }
- },
- "ext": {
- "version": "1.6.0",
- "resolved": "https://registry.npmjs.org/ext/-/ext-1.6.0.tgz",
- "integrity": "sha512-sdBImtzkq2HpkdRLtlLWDa6w4DX22ijZLKx8BMPUuKe1c5lbN6xwQDQCxSfxBQnHZ13ls/FH0MQZx/q/gr6FQg==",
- "requires": {
- "type": "^2.5.0"
- },
- "dependencies": {
- "type": {
- "version": "2.6.0",
- "resolved": "https://registry.npmjs.org/type/-/type-2.6.0.tgz",
- "integrity": "sha512-eiDBDOmkih5pMbo9OqsqPRGMljLodLcwd5XD5JbtNB0o89xZAwynY9EdCDsJU7LtcVCClu9DvM7/0Ep1hYX3EQ=="
- }
- }
- },
- "fast-deep-equal": {
- "version": "3.1.3",
- "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz",
- "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==",
- "dev": true
- },
- "fast-json-stable-stringify": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz",
- "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==",
- "dev": true
- },
- "fastest-levenshtein": {
- "version": "1.0.12",
- "resolved": "https://registry.npmjs.org/fastest-levenshtein/-/fastest-levenshtein-1.0.12.tgz",
- "integrity": "sha512-On2N+BpYJ15xIC974QNVuYGMOlEVt4s0EOI3wwMqOmK1fdDY+FN/zltPV8vosq4ad4c/gJ1KHScUn/6AWIgiow==",
- "dev": true
- },
- "find-up": {
- "version": "4.1.0",
- "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz",
- "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==",
- "dev": true,
- "requires": {
- "locate-path": "^5.0.0",
- "path-exists": "^4.0.0"
- }
- },
- "fsevents": {
- "version": "2.3.2",
- "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz",
- "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==",
- "dev": true,
- "optional": true
- },
- "function-bind": {
- "version": "1.1.1",
- "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz",
- "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==",
- "dev": true
- },
- "gensync": {
- "version": "1.0.0-beta.2",
- "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz",
- "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==",
- "dev": true
- },
- "get-intrinsic": {
- "version": "1.1.1",
- "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.1.tgz",
- "integrity": "sha512-kWZrnVM42QCiEA2Ig1bG8zjoIMOgxWwYCEeNdwY6Tv/cOSeGpcoX4pXHfKUxNKVoArnrEr2e9srnAxxGIraS9Q==",
- "dev": true,
- "requires": {
- "function-bind": "^1.1.1",
- "has": "^1.0.3",
- "has-symbols": "^1.0.1"
- }
- },
- "glob-to-regexp": {
- "version": "0.4.1",
- "resolved": "https://registry.npmjs.org/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz",
- "integrity": "sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==",
- "dev": true
- },
- "globals": {
- "version": "11.12.0",
- "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz",
- "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==",
- "dev": true
- },
- "graceful-fs": {
- "version": "4.2.10",
- "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.10.tgz",
- "integrity": "sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==",
- "dev": true
- },
- "has": {
- "version": "1.0.3",
- "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz",
- "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==",
- "dev": true,
- "requires": {
- "function-bind": "^1.1.1"
- }
- },
- "has-flag": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz",
- "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=",
- "dev": true
- },
- "has-property-descriptors": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz",
- "integrity": "sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ==",
- "dev": true,
- "requires": {
- "get-intrinsic": "^1.1.1"
- }
- },
- "has-symbols": {
- "version": "1.0.3",
- "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz",
- "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==",
- "dev": true
- },
- "hash-base": {
- "version": "3.1.0",
- "resolved": "https://registry.npmjs.org/hash-base/-/hash-base-3.1.0.tgz",
- "integrity": "sha512-1nmYp/rhMDiE7AYkDw+lLwlAzz0AntGIe51F3RfFfEqyQ3feY2eI/NcwC6umIQVOASPMsWJLJScWKSSvzL9IVA==",
- "requires": {
- "inherits": "^2.0.4",
- "readable-stream": "^3.6.0",
- "safe-buffer": "^5.2.0"
- },
- "dependencies": {
- "safe-buffer": {
- "version": "5.2.1",
- "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
- "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ=="
- }
- }
- },
- "hash.js": {
- "version": "1.1.7",
- "resolved": "https://registry.npmjs.org/hash.js/-/hash.js-1.1.7.tgz",
- "integrity": "sha512-taOaskGt4z4SOANNseOviYDvjEJinIkRgmp7LbKP2YTTmVxWBl87s/uzK9r+44BclBSp2X7K1hqeNfz9JbBeXA==",
- "requires": {
- "inherits": "^2.0.3",
- "minimalistic-assert": "^1.0.1"
- }
- },
- "hmac-drbg": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/hmac-drbg/-/hmac-drbg-1.0.1.tgz",
- "integrity": "sha512-Tti3gMqLdZfhOQY1Mzf/AanLiqh1WTiJgEj26ZuYQ9fbkLomzGchCws4FyrSd4VkpBfiNhaE1On+lOz894jvXg==",
- "requires": {
- "hash.js": "^1.0.3",
- "minimalistic-assert": "^1.0.0",
- "minimalistic-crypto-utils": "^1.0.1"
- }
- },
- "import-local": {
- "version": "3.1.0",
- "resolved": "https://registry.npmjs.org/import-local/-/import-local-3.1.0.tgz",
- "integrity": "sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg==",
- "dev": true,
- "requires": {
- "pkg-dir": "^4.2.0",
- "resolve-cwd": "^3.0.0"
- }
- },
- "inherits": {
- "version": "2.0.4",
- "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
- "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="
- },
- "interpret": {
- "version": "2.2.0",
- "resolved": "https://registry.npmjs.org/interpret/-/interpret-2.2.0.tgz",
- "integrity": "sha512-Ju0Bz/cEia55xDwUWEa8+olFpCiQoypjnQySseKtmjNrnps3P+xfpUmGr90T7yjlVJmOtybRvPXhKMbHr+fWnw==",
- "dev": true
- },
- "is-core-module": {
- "version": "2.9.0",
- "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.9.0.tgz",
- "integrity": "sha512-+5FPy5PnwmO3lvfMb0AsoPaBG+5KHUI0wYFXOtYPnVVVspTFUuMZNfNaNVRt3FZadstu2c8x23vykRW/NBoU6A==",
- "dev": true,
- "requires": {
- "has": "^1.0.3"
- }
- },
- "is-module": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/is-module/-/is-module-1.0.0.tgz",
- "integrity": "sha1-Mlj7afeMFNW4FdZkM2tM/7ZEFZE=",
- "dev": true
- },
- "is-plain-object": {
- "version": "2.0.4",
- "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-2.0.4.tgz",
- "integrity": "sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==",
- "dev": true,
- "requires": {
- "isobject": "^3.0.1"
- }
- },
- "is-reference": {
- "version": "1.2.1",
- "resolved": "https://registry.npmjs.org/is-reference/-/is-reference-1.2.1.tgz",
- "integrity": "sha512-U82MsXXiFIrjCK4otLT+o2NA2Cd2g5MLoOVXUZjIOhLurrRxpEXzI8O0KZHr3IjLvlAH1kTPYSuqer5T9ZVBKQ==",
- "dev": true,
- "requires": {
- "@types/estree": "*"
- }
- },
- "is-typedarray": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz",
- "integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo="
- },
- "isexe": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz",
- "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==",
- "dev": true
- },
- "isobject": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz",
- "integrity": "sha1-TkMekrEalzFjaqH5yNHMvP2reN8=",
- "dev": true
- },
- "jest-worker": {
- "version": "27.5.1",
- "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-27.5.1.tgz",
- "integrity": "sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg==",
- "dev": true,
- "requires": {
- "@types/node": "*",
- "merge-stream": "^2.0.0",
- "supports-color": "^8.0.0"
- },
- "dependencies": {
- "has-flag": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz",
- "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==",
- "dev": true
- },
- "supports-color": {
- "version": "8.1.1",
- "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz",
- "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==",
- "dev": true,
- "requires": {
- "has-flag": "^4.0.0"
- }
- }
- }
- },
- "js-tokens": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz",
- "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==",
- "dev": true
- },
- "jsesc": {
- "version": "2.5.2",
- "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz",
- "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==",
- "dev": true
- },
- "json-parse-even-better-errors": {
- "version": "2.3.1",
- "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz",
- "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==",
- "dev": true
- },
- "json-schema-traverse": {
- "version": "0.4.1",
- "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz",
- "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==",
- "dev": true
- },
- "json5": {
- "version": "2.2.1",
- "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.1.tgz",
- "integrity": "sha512-1hqLFMSrGHRHxav9q9gNjJ5EXznIxGVO09xQRrwplcS8qs28pZ8s8hupZAmqDwZUmVZ2Qb2jnyPOWcDH8m8dlA==",
- "dev": true
- },
- "kind-of": {
- "version": "6.0.3",
- "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz",
- "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==",
- "dev": true
- },
- "loader-runner": {
- "version": "4.3.0",
- "resolved": "https://registry.npmjs.org/loader-runner/-/loader-runner-4.3.0.tgz",
- "integrity": "sha512-3R/1M+yS3j5ou80Me59j7F9IMs4PXs3VqRrm0TU3AbKPxlmpoY1TNscJV/oGJXo8qCatFGTfDbY6W6ipGOYXfg==",
- "dev": true
- },
- "locate-path": {
- "version": "5.0.0",
- "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz",
- "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==",
- "dev": true,
- "requires": {
- "p-locate": "^4.1.0"
- }
- },
- "lodash": {
- "version": "4.17.21",
- "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
- "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="
- },
- "lodash.debounce": {
- "version": "4.0.8",
- "resolved": "https://registry.npmjs.org/lodash.debounce/-/lodash.debounce-4.0.8.tgz",
- "integrity": "sha1-gteb/zCmfEAF/9XiUVMArZyk168=",
- "dev": true
- },
- "lodash.sortby": {
- "version": "4.7.0",
- "resolved": "https://registry.npmjs.org/lodash.sortby/-/lodash.sortby-4.7.0.tgz",
- "integrity": "sha1-7dFMgk4sycHgsKG0K7UhBRakJDg=",
- "dev": true
- },
- "magic-string": {
- "version": "0.25.9",
- "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.25.9.tgz",
- "integrity": "sha512-RmF0AsMzgt25qzqqLc1+MbHmhdx0ojF2Fvs4XnOqz2ZOBXzzkEwc/dJQZCYHAn7v1jbVOjAZfK8msRn4BxO4VQ==",
- "dev": true,
- "requires": {
- "sourcemap-codec": "^1.4.8"
- }
- },
- "md5.js": {
- "version": "1.3.5",
- "resolved": "https://registry.npmjs.org/md5.js/-/md5.js-1.3.5.tgz",
- "integrity": "sha512-xitP+WxNPcTTOgnTJcrhM0xvdPepipPSf3I8EIpGKeFLjt3PlJLIDG3u8EX53ZIubkb+5U2+3rELYpEhHhzdkg==",
- "requires": {
- "hash-base": "^3.0.0",
- "inherits": "^2.0.1",
- "safe-buffer": "^5.1.2"
- }
- },
- "merge-stream": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz",
- "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==",
- "dev": true
- },
- "miller-rabin": {
- "version": "4.0.1",
- "resolved": "https://registry.npmjs.org/miller-rabin/-/miller-rabin-4.0.1.tgz",
- "integrity": "sha512-115fLhvZVqWwHPbClyntxEVfVDfl9DLLTuJvq3g2O/Oxi8AiNouAHvDSzHS0viUJc+V5vm3eq91Xwqn9dp4jRA==",
- "requires": {
- "bn.js": "^4.0.0",
- "brorand": "^1.0.1"
- },
- "dependencies": {
- "bn.js": {
- "version": "4.12.0",
- "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz",
- "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA=="
- }
- }
- },
- "mime-db": {
- "version": "1.52.0",
- "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
- "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
- "dev": true
- },
- "mime-types": {
- "version": "2.1.35",
- "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
- "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
- "dev": true,
- "requires": {
- "mime-db": "1.52.0"
- }
- },
- "minimalistic-assert": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz",
- "integrity": "sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A=="
- },
- "minimalistic-crypto-utils": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/minimalistic-crypto-utils/-/minimalistic-crypto-utils-1.0.1.tgz",
- "integrity": "sha512-JIYlbt6g8i5jKfJ3xz7rF0LXmv2TkDxBLUkiBeZ7bAx4GnnNMr8xFpGnOxn6GhTEHx3SjRrZEoU+j04prX1ktg=="
- },
- "ms": {
- "version": "2.1.2",
- "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
- "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==",
- "dev": true
- },
- "neo-async": {
- "version": "2.6.2",
- "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz",
- "integrity": "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==",
- "dev": true
- },
- "next-tick": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/next-tick/-/next-tick-1.1.0.tgz",
- "integrity": "sha512-CXdUiJembsNjuToQvxayPZF9Vqht7hewsvy2sOWafLvi2awflj9mOC6bHIg50orX8IJvWKY9wYQ/zB2kogPslQ=="
- },
- "node-gyp-build": {
- "version": "4.4.0",
- "resolved": "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.4.0.tgz",
- "integrity": "sha512-amJnQCcgtRVw9SvoebO3BKGESClrfXGCUTX9hSn1OuGQTQBOZmVd0Z0OlecpuRksKvbsUqALE8jls/ErClAPuQ=="
- },
- "node-releases": {
- "version": "2.0.3",
- "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.3.tgz",
- "integrity": "sha512-maHFz6OLqYxz+VQyCAtA3PTX4UP/53pa05fyDNc9CwjvJ0yEh6+xBwKsgCxMNhS8taUKBFYxfuiaD9U/55iFaw==",
- "dev": true
- },
- "object-keys": {
- "version": "1.1.1",
- "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz",
- "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==",
- "dev": true
- },
- "object.assign": {
- "version": "4.1.2",
- "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.2.tgz",
- "integrity": "sha512-ixT2L5THXsApyiUPYKmW+2EHpXXe5Ii3M+f4e+aJFAHao5amFRW6J0OO6c/LU8Be47utCx2GL89hxGB6XSmKuQ==",
- "dev": true,
- "requires": {
- "call-bind": "^1.0.0",
- "define-properties": "^1.1.3",
- "has-symbols": "^1.0.1",
- "object-keys": "^1.1.1"
- }
- },
- "p-limit": {
- "version": "2.3.0",
- "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz",
- "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==",
- "dev": true,
- "requires": {
- "p-try": "^2.0.0"
- }
- },
- "p-locate": {
- "version": "4.1.0",
- "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz",
- "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==",
- "dev": true,
- "requires": {
- "p-limit": "^2.2.0"
- }
- },
- "p-try": {
- "version": "2.2.0",
- "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz",
- "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==",
- "dev": true
- },
- "parse-asn1": {
- "version": "5.1.6",
- "resolved": "https://registry.npmjs.org/parse-asn1/-/parse-asn1-5.1.6.tgz",
- "integrity": "sha512-RnZRo1EPU6JBnra2vGHj0yhp6ebyjBZpmUCLHWiFhxlzvBCCpAuZ7elsBp1PVAbQN0/04VD/19rfzlBSwLstMw==",
- "requires": {
- "asn1.js": "^5.2.0",
- "browserify-aes": "^1.0.0",
- "evp_bytestokey": "^1.0.0",
- "pbkdf2": "^3.0.3",
- "safe-buffer": "^5.1.1"
- }
- },
- "path-exists": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz",
- "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==",
- "dev": true
- },
- "path-key": {
- "version": "3.1.1",
- "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz",
- "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==",
- "dev": true
- },
- "path-parse": {
- "version": "1.0.7",
- "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz",
- "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==",
- "dev": true
- },
- "pbkdf2": {
- "version": "3.1.2",
- "resolved": "https://registry.npmjs.org/pbkdf2/-/pbkdf2-3.1.2.tgz",
- "integrity": "sha512-iuh7L6jA7JEGu2WxDwtQP1ddOpaJNC4KlDEFfdQajSGgGPNi4OyDc2R7QnbY2bR9QjBVGwgvTdNJZoE7RaxUMA==",
- "requires": {
- "create-hash": "^1.1.2",
- "create-hmac": "^1.1.4",
- "ripemd160": "^2.0.1",
- "safe-buffer": "^5.0.1",
- "sha.js": "^2.4.8"
- }
- },
- "picocolors": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz",
- "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==",
- "dev": true
- },
- "picomatch": {
- "version": "2.3.1",
- "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz",
- "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==",
- "dev": true
- },
- "pkg-dir": {
- "version": "4.2.0",
- "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz",
- "integrity": "sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==",
- "dev": true,
- "requires": {
- "find-up": "^4.0.0"
- }
- },
- "public-encrypt": {
- "version": "4.0.3",
- "resolved": "https://registry.npmjs.org/public-encrypt/-/public-encrypt-4.0.3.tgz",
- "integrity": "sha512-zVpa8oKZSz5bTMTFClc1fQOnyyEzpl5ozpi1B5YcvBrdohMjH2rfsBtyXcuNuwjsDIXmBYlF2N5FlJYhR29t8Q==",
- "requires": {
- "bn.js": "^4.1.0",
- "browserify-rsa": "^4.0.0",
- "create-hash": "^1.1.0",
- "parse-asn1": "^5.0.0",
- "randombytes": "^2.0.1",
- "safe-buffer": "^5.1.2"
- },
- "dependencies": {
- "bn.js": {
- "version": "4.12.0",
- "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz",
- "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA=="
- }
- }
- },
- "punycode": {
- "version": "2.1.1",
- "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz",
- "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==",
- "dev": true
- },
- "randombytes": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz",
- "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==",
- "requires": {
- "safe-buffer": "^5.1.0"
- }
- },
- "randomfill": {
- "version": "1.0.4",
- "resolved": "https://registry.npmjs.org/randomfill/-/randomfill-1.0.4.tgz",
- "integrity": "sha512-87lcbR8+MhcWcUiQ+9e+Rwx8MyR2P7qnt15ynUlbm3TU/fjbgz4GsvfSUDTemtCCtVCqb4ZcEFlyPNTh9bBTLw==",
- "requires": {
- "randombytes": "^2.0.5",
- "safe-buffer": "^5.1.0"
- }
- },
- "readable-stream": {
- "version": "3.6.0",
- "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz",
- "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==",
- "requires": {
- "inherits": "^2.0.3",
- "string_decoder": "^1.1.1",
- "util-deprecate": "^1.0.1"
- }
- },
- "rechoir": {
- "version": "0.7.1",
- "resolved": "https://registry.npmjs.org/rechoir/-/rechoir-0.7.1.tgz",
- "integrity": "sha512-/njmZ8s1wVeR6pjTZ+0nCnv8SpZNRMT2D1RLOJQESlYFDBvwpTA4KWJpZ+sBJ4+vhjILRcK7JIFdGCdxEAAitg==",
- "dev": true,
- "requires": {
- "resolve": "^1.9.0"
- }
- },
- "regenerate": {
- "version": "1.4.2",
- "resolved": "https://registry.npmjs.org/regenerate/-/regenerate-1.4.2.tgz",
- "integrity": "sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A==",
- "dev": true
- },
- "regenerate-unicode-properties": {
- "version": "10.0.1",
- "resolved": "https://registry.npmjs.org/regenerate-unicode-properties/-/regenerate-unicode-properties-10.0.1.tgz",
- "integrity": "sha512-vn5DU6yg6h8hP/2OkQo3K7uVILvY4iu0oI4t3HFa81UPkhGJwkRwM10JEc3upjdhHjs/k8GJY1sRBhk5sr69Bw==",
- "dev": true,
- "requires": {
- "regenerate": "^1.4.2"
- }
- },
- "regenerator-runtime": {
- "version": "0.13.9",
- "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz",
- "integrity": "sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA==",
- "dev": true
- },
- "regenerator-transform": {
- "version": "0.15.0",
- "resolved": "https://registry.npmjs.org/regenerator-transform/-/regenerator-transform-0.15.0.tgz",
- "integrity": "sha512-LsrGtPmbYg19bcPHwdtmXwbW+TqNvtY4riE3P83foeHRroMbH6/2ddFBfab3t7kbzc7v7p4wbkIecHImqt0QNg==",
- "dev": true,
- "requires": {
- "@babel/runtime": "^7.8.4"
- }
- },
- "regexpu-core": {
- "version": "5.0.1",
- "resolved": "https://registry.npmjs.org/regexpu-core/-/regexpu-core-5.0.1.tgz",
- "integrity": "sha512-CriEZlrKK9VJw/xQGJpQM5rY88BtuL8DM+AEwvcThHilbxiTAy8vq4iJnd2tqq8wLmjbGZzP7ZcKFjbGkmEFrw==",
- "dev": true,
- "requires": {
- "regenerate": "^1.4.2",
- "regenerate-unicode-properties": "^10.0.1",
- "regjsgen": "^0.6.0",
- "regjsparser": "^0.8.2",
- "unicode-match-property-ecmascript": "^2.0.0",
- "unicode-match-property-value-ecmascript": "^2.0.0"
- }
- },
- "regjsgen": {
- "version": "0.6.0",
- "resolved": "https://registry.npmjs.org/regjsgen/-/regjsgen-0.6.0.tgz",
- "integrity": "sha512-ozE883Uigtqj3bx7OhL1KNbCzGyW2NQZPl6Hs09WTvCuZD5sTI4JY58bkbQWa/Y9hxIsvJ3M8Nbf7j54IqeZbA==",
- "dev": true
- },
- "regjsparser": {
- "version": "0.8.4",
- "resolved": "https://registry.npmjs.org/regjsparser/-/regjsparser-0.8.4.tgz",
- "integrity": "sha512-J3LABycON/VNEu3abOviqGHuB/LOtOQj8SKmfP9anY5GfAVw/SPjwzSjxGjbZXIxbGfqTHtJw58C2Li/WkStmA==",
- "dev": true,
- "requires": {
- "jsesc": "~0.5.0"
- },
- "dependencies": {
- "jsesc": {
- "version": "0.5.0",
- "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-0.5.0.tgz",
- "integrity": "sha1-597mbjXW/Bb3EP6R1c9p9w8IkR0=",
- "dev": true
- }
- }
- },
- "resolve": {
- "version": "1.22.0",
- "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.0.tgz",
- "integrity": "sha512-Hhtrw0nLeSrFQ7phPp4OOcVjLPIeMnRlr5mcnVuMe7M/7eBn98A3hmFRLoFo3DLZkivSYwhRUJTyPyWAk56WLw==",
- "dev": true,
- "requires": {
- "is-core-module": "^2.8.1",
- "path-parse": "^1.0.7",
- "supports-preserve-symlinks-flag": "^1.0.0"
- }
- },
- "resolve-cwd": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/resolve-cwd/-/resolve-cwd-3.0.0.tgz",
- "integrity": "sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg==",
- "dev": true,
- "requires": {
- "resolve-from": "^5.0.0"
- }
- },
- "resolve-from": {
- "version": "5.0.0",
- "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz",
- "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==",
- "dev": true
- },
- "ripemd160": {
- "version": "2.0.2",
- "resolved": "https://registry.npmjs.org/ripemd160/-/ripemd160-2.0.2.tgz",
- "integrity": "sha512-ii4iagi25WusVoiC4B4lq7pbXfAp3D9v5CwfkY33vffw2+pkDjY1D8GaN7spsxvCSx8dkPqOZCEZyfxcmJG2IA==",
- "requires": {
- "hash-base": "^3.0.0",
- "inherits": "^2.0.1"
- }
- },
- "rollup": {
- "version": "2.70.2",
- "resolved": "https://registry.npmjs.org/rollup/-/rollup-2.70.2.tgz",
- "integrity": "sha512-EitogNZnfku65I1DD5Mxe8JYRUCy0hkK5X84IlDtUs+O6JRMpRciXTzyCUuX11b5L5pvjH+OmFXiQ3XjabcXgg==",
- "dev": true,
- "requires": {
- "fsevents": "~2.3.2"
- }
- },
- "rollup-plugin-babel": {
- "version": "4.4.0",
- "resolved": "https://registry.npmjs.org/rollup-plugin-babel/-/rollup-plugin-babel-4.4.0.tgz",
- "integrity": "sha512-Lek/TYp1+7g7I+uMfJnnSJ7YWoD58ajo6Oarhlex7lvUce+RCKRuGRSgztDO3/MF/PuGKmUL5iTHKf208UNszw==",
- "dev": true,
- "requires": {
- "@babel/helper-module-imports": "^7.0.0",
- "rollup-pluginutils": "^2.8.1"
- }
- },
- "rollup-plugin-commonjs": {
- "version": "10.1.0",
- "resolved": "https://registry.npmjs.org/rollup-plugin-commonjs/-/rollup-plugin-commonjs-10.1.0.tgz",
- "integrity": "sha512-jlXbjZSQg8EIeAAvepNwhJj++qJWNJw1Cl0YnOqKtP5Djx+fFGkp3WRh+W0ASCaFG5w1jhmzDxgu3SJuVxPF4Q==",
- "dev": true,
- "requires": {
- "estree-walker": "^0.6.1",
- "is-reference": "^1.1.2",
- "magic-string": "^0.25.2",
- "resolve": "^1.11.0",
- "rollup-pluginutils": "^2.8.1"
- }
- },
- "rollup-plugin-node-resolve": {
- "version": "5.2.0",
- "resolved": "https://registry.npmjs.org/rollup-plugin-node-resolve/-/rollup-plugin-node-resolve-5.2.0.tgz",
- "integrity": "sha512-jUlyaDXts7TW2CqQ4GaO5VJ4PwwaV8VUGA7+km3n6k6xtOEacf61u0VXwN80phY/evMcaS+9eIeJ9MOyDxt5Zw==",
- "dev": true,
- "requires": {
- "@types/resolve": "0.0.8",
- "builtin-modules": "^3.1.0",
- "is-module": "^1.0.0",
- "resolve": "^1.11.1",
- "rollup-pluginutils": "^2.8.1"
- }
- },
- "rollup-pluginutils": {
- "version": "2.8.2",
- "resolved": "https://registry.npmjs.org/rollup-pluginutils/-/rollup-pluginutils-2.8.2.tgz",
- "integrity": "sha512-EEp9NhnUkwY8aif6bxgovPHMoMoNr2FulJziTndpt5H9RdwC47GSGuII9XxpSdzVGM0GWrNPHV6ie1LTNJPaLQ==",
- "dev": true,
- "requires": {
- "estree-walker": "^0.6.1"
- }
- },
- "safe-buffer": {
- "version": "5.1.2",
- "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz",
- "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g=="
- },
- "safer-buffer": {
- "version": "2.1.2",
- "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
- "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="
- },
- "schema-utils": {
- "version": "3.1.1",
- "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.1.1.tgz",
- "integrity": "sha512-Y5PQxS4ITlC+EahLuXaY86TXfR7Dc5lw294alXOq86JAHCihAIZfqv8nNCWvaEJvaC51uN9hbLGeV0cFBdH+Fw==",
- "dev": true,
- "requires": {
- "@types/json-schema": "^7.0.8",
- "ajv": "^6.12.5",
- "ajv-keywords": "^3.5.2"
- }
- },
- "semver": {
- "version": "6.3.0",
- "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz",
- "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==",
- "dev": true
- },
- "serialize-javascript": {
- "version": "6.0.0",
- "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.0.tgz",
- "integrity": "sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag==",
- "dev": true,
- "requires": {
- "randombytes": "^2.1.0"
- }
- },
- "sha.js": {
- "version": "2.4.11",
- "resolved": "https://registry.npmjs.org/sha.js/-/sha.js-2.4.11.tgz",
- "integrity": "sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ==",
- "requires": {
- "inherits": "^2.0.1",
- "safe-buffer": "^5.0.1"
- }
- },
- "shallow-clone": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/shallow-clone/-/shallow-clone-3.0.1.tgz",
- "integrity": "sha512-/6KqX+GVUdqPuPPd2LxDDxzX6CAbjJehAAOKlNpqqUpAqPM6HeL8f+o3a+JsyGjn2lv0WY8UsTgUJjU9Ok55NA==",
- "dev": true,
- "requires": {
- "kind-of": "^6.0.2"
- }
- },
- "shebang-command": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz",
- "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==",
- "dev": true,
- "requires": {
- "shebang-regex": "^3.0.0"
- }
- },
- "shebang-regex": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz",
- "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==",
- "dev": true
- },
- "source-map": {
- "version": "0.5.7",
- "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz",
- "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=",
- "dev": true
- },
- "source-map-support": {
- "version": "0.5.21",
- "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz",
- "integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==",
- "dev": true,
- "requires": {
- "buffer-from": "^1.0.0",
- "source-map": "^0.6.0"
- },
- "dependencies": {
- "source-map": {
- "version": "0.6.1",
- "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
- "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==",
- "dev": true
- }
- }
- },
- "sourcemap-codec": {
- "version": "1.4.8",
- "resolved": "https://registry.npmjs.org/sourcemap-codec/-/sourcemap-codec-1.4.8.tgz",
- "integrity": "sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA==",
- "dev": true
- },
- "string_decoder": {
- "version": "1.3.0",
- "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz",
- "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==",
- "requires": {
- "safe-buffer": "~5.2.0"
- },
- "dependencies": {
- "safe-buffer": {
- "version": "5.2.1",
- "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
- "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ=="
- }
- }
- },
- "supports-color": {
- "version": "5.5.0",
- "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz",
- "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==",
- "dev": true,
- "requires": {
- "has-flag": "^3.0.0"
- }
- },
- "supports-preserve-symlinks-flag": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz",
- "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==",
- "dev": true
- },
- "tapable": {
- "version": "2.2.1",
- "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.2.1.tgz",
- "integrity": "sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ==",
- "dev": true
- },
- "terser": {
- "version": "5.13.0",
- "resolved": "https://registry.npmjs.org/terser/-/terser-5.13.0.tgz",
- "integrity": "sha512-sgQ99P+fRBM1jAYzN9RTnD/xEWx/7LZgYTCRgmYriSq1wxxqiQPJgXkkLBBuwySDWJ2PP0PnVQyuf4xLUuH4Ng==",
- "dev": true,
- "requires": {
- "acorn": "^8.5.0",
- "commander": "^2.20.0",
- "source-map": "~0.8.0-beta.0",
- "source-map-support": "~0.5.20"
- },
- "dependencies": {
- "commander": {
- "version": "2.20.3",
- "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz",
- "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==",
- "dev": true
- },
- "source-map": {
- "version": "0.8.0-beta.0",
- "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.8.0-beta.0.tgz",
- "integrity": "sha512-2ymg6oRBpebeZi9UUNsgQ89bhx01TcTkmNTGnNO88imTmbSgy4nfujrgVEFKWpMTEGA11EDkTt7mqObTPdigIA==",
- "dev": true,
- "requires": {
- "whatwg-url": "^7.0.0"
- }
- }
- }
- },
- "terser-webpack-plugin": {
- "version": "5.3.1",
- "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.3.1.tgz",
- "integrity": "sha512-GvlZdT6wPQKbDNW/GDQzZFg/j4vKU96yl2q6mcUkzKOgW4gwf1Z8cZToUCrz31XHlPWH8MVb1r2tFtdDtTGJ7g==",
- "dev": true,
- "requires": {
- "jest-worker": "^27.4.5",
- "schema-utils": "^3.1.1",
- "serialize-javascript": "^6.0.0",
- "source-map": "^0.6.1",
- "terser": "^5.7.2"
- },
- "dependencies": {
- "source-map": {
- "version": "0.6.1",
- "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
- "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==",
- "dev": true
- }
- }
- },
- "to-fast-properties": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz",
- "integrity": "sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4=",
- "dev": true
- },
- "tr46": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/tr46/-/tr46-1.0.1.tgz",
- "integrity": "sha1-qLE/1r/SSJUZZ0zN5VujaTtwbQk=",
- "dev": true,
- "requires": {
- "punycode": "^2.1.0"
- }
- },
- "type": {
- "version": "1.2.0",
- "resolved": "https://registry.npmjs.org/type/-/type-1.2.0.tgz",
- "integrity": "sha512-+5nt5AAniqsCnu2cEQQdpzCAh33kVx8n0VoFidKpB1dVVLAN/F+bgVOqOJqOnEnrhp222clB5p3vUlD+1QAnfg=="
- },
- "typedarray-to-buffer": {
- "version": "3.1.5",
- "resolved": "https://registry.npmjs.org/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz",
- "integrity": "sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q==",
- "requires": {
- "is-typedarray": "^1.0.0"
- }
- },
- "unicode-canonical-property-names-ecmascript": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-2.0.0.tgz",
- "integrity": "sha512-yY5PpDlfVIU5+y/BSCxAJRBIS1Zc2dDG3Ujq+sR0U+JjUevW2JhocOF+soROYDSaAezOzOKuyyixhD6mBknSmQ==",
- "dev": true
- },
- "unicode-match-property-ecmascript": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-2.0.0.tgz",
- "integrity": "sha512-5kaZCrbp5mmbz5ulBkDkbY0SsPOjKqVS35VpL9ulMPfSl0J0Xsm+9Evphv9CoIZFwre7aJoa94AY6seMKGVN5Q==",
- "dev": true,
- "requires": {
- "unicode-canonical-property-names-ecmascript": "^2.0.0",
- "unicode-property-aliases-ecmascript": "^2.0.0"
- }
- },
- "unicode-match-property-value-ecmascript": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-2.0.0.tgz",
- "integrity": "sha512-7Yhkc0Ye+t4PNYzOGKedDhXbYIBe1XEQYQxOPyhcXNMJ0WCABqqj6ckydd6pWRZTHV4GuCPKdBAUiMc60tsKVw==",
- "dev": true
- },
- "unicode-property-aliases-ecmascript": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-2.0.0.tgz",
- "integrity": "sha512-5Zfuy9q/DFr4tfO7ZPeVXb1aPoeQSdeFMLpYuFebehDAhbuevLs5yxSZmIFN1tP5F9Wl4IpJrYojg85/zgyZHQ==",
- "dev": true
- },
- "uri-js": {
- "version": "4.4.1",
- "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz",
- "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==",
- "dev": true,
- "requires": {
- "punycode": "^2.1.0"
- }
- },
- "utf-8-validate": {
- "version": "5.0.9",
- "resolved": "https://registry.npmjs.org/utf-8-validate/-/utf-8-validate-5.0.9.tgz",
- "integrity": "sha512-Yek7dAy0v3Kl0orwMlvi7TPtiCNrdfHNd7Gcc/pLq4BLXqfAmd0J7OWMizUQnTTJsyjKn02mU7anqwfmUP4J8Q==",
- "requires": {
- "node-gyp-build": "^4.3.0"
- }
- },
- "util-deprecate": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
- "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw=="
- },
- "uuid": {
- "version": "8.3.2",
- "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
- "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg=="
- },
- "watchpack": {
- "version": "2.3.1",
- "resolved": "https://registry.npmjs.org/watchpack/-/watchpack-2.3.1.tgz",
- "integrity": "sha512-x0t0JuydIo8qCNctdDrn1OzH/qDzk2+rdCOC3YzumZ42fiMqmQ7T3xQurykYMhYfHaPHTp4ZxAx2NfUo1K6QaA==",
- "dev": true,
- "requires": {
- "glob-to-regexp": "^0.4.1",
- "graceful-fs": "^4.1.2"
- }
- },
- "webidl-conversions": {
- "version": "4.0.2",
- "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-4.0.2.tgz",
- "integrity": "sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg==",
- "dev": true
- },
- "webpack": {
- "version": "5.73.0",
- "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.73.0.tgz",
- "integrity": "sha512-svjudQRPPa0YiOYa2lM/Gacw0r6PvxptHj4FuEKQ2kX05ZLkjbVc5MnPs6its5j7IZljnIqSVo/OsY2X0IpHGA==",
- "dev": true,
- "requires": {
- "@types/eslint-scope": "^3.7.3",
- "@types/estree": "^0.0.51",
- "@webassemblyjs/ast": "1.11.1",
- "@webassemblyjs/wasm-edit": "1.11.1",
- "@webassemblyjs/wasm-parser": "1.11.1",
- "acorn": "^8.4.1",
- "acorn-import-assertions": "^1.7.6",
- "browserslist": "^4.14.5",
- "chrome-trace-event": "^1.0.2",
- "enhanced-resolve": "^5.9.3",
- "es-module-lexer": "^0.9.0",
- "eslint-scope": "5.1.1",
- "events": "^3.2.0",
- "glob-to-regexp": "^0.4.1",
- "graceful-fs": "^4.2.9",
- "json-parse-even-better-errors": "^2.3.1",
- "loader-runner": "^4.2.0",
- "mime-types": "^2.1.27",
- "neo-async": "^2.6.2",
- "schema-utils": "^3.1.0",
- "tapable": "^2.1.1",
- "terser-webpack-plugin": "^5.1.3",
- "watchpack": "^2.3.1",
- "webpack-sources": "^3.2.3"
- }
- },
- "webpack-cli": {
- "version": "4.10.0",
- "resolved": "https://registry.npmjs.org/webpack-cli/-/webpack-cli-4.10.0.tgz",
- "integrity": "sha512-NLhDfH/h4O6UOy+0LSso42xvYypClINuMNBVVzX4vX98TmTaTUxwRbXdhucbFMd2qLaCTcLq/PdYrvi8onw90w==",
- "dev": true,
- "requires": {
- "@discoveryjs/json-ext": "^0.5.0",
- "@webpack-cli/configtest": "^1.2.0",
- "@webpack-cli/info": "^1.5.0",
- "@webpack-cli/serve": "^1.7.0",
- "colorette": "^2.0.14",
- "commander": "^7.0.0",
- "cross-spawn": "^7.0.3",
- "fastest-levenshtein": "^1.0.12",
- "import-local": "^3.0.2",
- "interpret": "^2.2.0",
- "rechoir": "^0.7.0",
- "webpack-merge": "^5.7.3"
- },
- "dependencies": {
- "commander": {
- "version": "7.2.0",
- "resolved": "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz",
- "integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==",
- "dev": true
- }
- }
- },
- "webpack-merge": {
- "version": "5.8.0",
- "resolved": "https://registry.npmjs.org/webpack-merge/-/webpack-merge-5.8.0.tgz",
- "integrity": "sha512-/SaI7xY0831XwP6kzuwhKWVKDP9t1QY1h65lAFLbZqMPIuYcD9QAW4u9STIbU9kaJbPBB/geU/gLr1wDjOhQ+Q==",
- "dev": true,
- "requires": {
- "clone-deep": "^4.0.1",
- "wildcard": "^2.0.0"
- }
- },
- "webpack-sources": {
- "version": "3.2.3",
- "resolved": "https://registry.npmjs.org/webpack-sources/-/webpack-sources-3.2.3.tgz",
- "integrity": "sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w==",
- "dev": true
- },
- "websocket": {
- "version": "1.0.34",
- "resolved": "https://registry.npmjs.org/websocket/-/websocket-1.0.34.tgz",
- "integrity": "sha512-PRDso2sGwF6kM75QykIesBijKSVceR6jL2G8NGYyq2XrItNC2P5/qL5XeR056GhA+Ly7JMFvJb9I312mJfmqnQ==",
- "requires": {
- "bufferutil": "^4.0.1",
- "debug": "^2.2.0",
- "es5-ext": "^0.10.50",
- "typedarray-to-buffer": "^3.1.5",
- "utf-8-validate": "^5.0.2",
- "yaeti": "^0.0.6"
- },
- "dependencies": {
- "debug": {
- "version": "2.6.9",
- "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
- "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==",
- "requires": {
- "ms": "2.0.0"
- }
- },
- "ms": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
- "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g="
- }
- }
- },
- "whatwg-url": {
- "version": "7.1.0",
- "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-7.1.0.tgz",
- "integrity": "sha512-WUu7Rg1DroM7oQvGWfOiAK21n74Gg+T4elXEQYkOhtyLeWiJFoOGLXPKI/9gzIie9CtwVLm8wtw6YJdKyxSjeg==",
- "dev": true,
- "requires": {
- "lodash.sortby": "^4.7.0",
- "tr46": "^1.0.1",
- "webidl-conversions": "^4.0.2"
- }
- },
- "which": {
- "version": "2.0.2",
- "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
- "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==",
- "dev": true,
- "requires": {
- "isexe": "^2.0.0"
- }
- },
- "wildcard": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/wildcard/-/wildcard-2.0.0.tgz",
- "integrity": "sha512-JcKqAHLPxcdb9KM49dufGXn2x3ssnfjbcaQdLlfZsL9rH9wgDQjUtDxbo8NE0F6SFvydeu1VhZe7hZuHsB2/pw==",
- "dev": true
- },
- "yaeti": {
- "version": "0.0.6",
- "resolved": "https://registry.npmjs.org/yaeti/-/yaeti-0.0.6.tgz",
- "integrity": "sha1-8m9ITXJoTPQr7ft2lwqhYI+/lXc="
- }
- }
-}
diff --git a/lc-esp-sdk-js/package.json b/lc-esp-sdk-js/package.json
deleted file mode 100644
index 1feb955cacba54ed85f41aed2c0c136fdc80fae3..0000000000000000000000000000000000000000
--- a/lc-esp-sdk-js/package.json
+++ /dev/null
@@ -1,37 +0,0 @@
-{
- "name": "lc-esp-sdk-js",
- "version": "18.0.0",
- "description": "Enhanced Services Platform/CLIENT",
- "author": "LEIGH&CO",
- "license": "LGPL-3.0-only",
- "main": "dist/lc-esp-sdk.cjs.js",
- "module": "dist/lc-esp-sdk.esm.js",
- "browser": "dist/lc-esp-sdk.umd.js",
- "scripts": {
- "build": "rollup -c",
- "test": "node test/spec",
- "pretest": "npm run build"
- },
- "devDependencies": {
- "@babel/core": "^7.12.10",
- "@babel/preset-env": "^7.12.11",
- "@rollup/plugin-node-resolve": "^13.2.1",
- "rollup": "^2.70.2",
- "rollup-plugin-babel": "^4.4.0",
- "rollup-plugin-commonjs": "^10.1.0",
- "rollup-plugin-node-resolve": "^5.2.0",
- "webpack": "^5.73.0",
- "webpack-cli": "^4.10.0"
- },
- "files": [
- "dist",
- "src"
- ],
- "dependencies": {
- "@stomp/stompjs": "^6.1.2",
- "crypto-browserify": "^3.12.0",
- "lodash": "^4.17.21",
- "uuid": "^8.3.2",
- "websocket": "^1.0.34"
- }
-}
diff --git a/lc-esp-sdk-js/rollup.config.js b/lc-esp-sdk-js/rollup.config.js
deleted file mode 100644
index a704f1d72915e483c27c8ef3f201872e03e963a3..0000000000000000000000000000000000000000
--- a/lc-esp-sdk-js/rollup.config.js
+++ /dev/null
@@ -1,39 +0,0 @@
-import resolve from "rollup-plugin-node-resolve";
-import commonjs from "rollup-plugin-commonjs";
-import babel from "rollup-plugin-babel";
-import pkg from "./package.json";
-
-export default [
- {
- input: "src/index.js", // your entry point
- output: {
- name: "lc-esp-sdk-js", // package name
- file: pkg.browser,
- format: "umd",
- },
- plugins: [
- resolve(),
- commonjs({
- namedExports: {
- '../lc-esp-sdk-js/dist/lc-esp-sdk.umd.js': ['ESPSDK']
- }
- }),
- babel({
- exclude: ["node_modules/**"],
- }),
- ],
- },
- {
- input: "src/index.js", // your entry point
- output: [
- {file: pkg.main, format: "cjs"},
- {file: pkg.module, format: "es"},
- ],
- plugins: [
- babel({
- exclude: ["node_modules/**"],
- }),
- ],
- },
-];
-
diff --git a/lc-esp-sdk-js/src/index.js b/lc-esp-sdk-js/src/index.js
deleted file mode 100644
index cdf554f836604912ac64a30e193830571480a36a..0000000000000000000000000000000000000000
--- a/lc-esp-sdk-js/src/index.js
+++ /dev/null
@@ -1,1275 +0,0 @@
-import {Client} from '@stomp/stompjs';
-import {v4 as uuidv4} from 'uuid';
-
-export var ESPSDK = {};
-
-ESPSDK.PREFIX_EOPARAM = 'lc.eo';
-ESPSDK.PARAM_CORRELATION_ID = "lc.esp.correlation_id";
-ESPSDK.PARAM_REPLY_TO = "lc.esp.reply_to";
-ESPSDK.KEY_EO_ID = '_id';
-ESPSDK.KEY_EO_TYPE = '_type';
-
-/*** ESP CODE ***/
-console.log('Enhanced Services Platform/SDK/JS mk18 (GIPSY DANGER)');
-
-/**
- * An ESP address. This class provides utility methods for interacting with ESP-format addresses. The provided
- * destination should be dot separated such as "a.b.c".
- *
- * If isTopic is set to true then this address is for a topic; otherwise, it is for a queue.
- *
- * @param destination A string destination.
- * @param isTopic true/false if the address refers to a topic.
- * @constructor
- */
-ESPSDK.Address = function (destination, isTopic) {
- this.destination = destination.toUpperCase();
- this.isTopic = isTopic;
-}
-
-/**
- * Return the address formatted as a STOMP consumer destination. The instance id refers to the unique id which
- * identifies this particular flight of the software. Two clients should not use the same id or undefined results
- * will occur.
- *
- * @param instanceId The instance id for the client.
- * @returns {string} The STOMP address.
- */
-ESPSDK.Address.prototype.getStompConsumerDestination = function (instanceId) {
- // Note that the destination is MQTT format, not ESP format
- if (this.isTopic) {
- return '/queue/ESP.' + instanceId + '.' + this.destination;
- }
- return '/queue/' + this.destination;
-}
-
-/**
- * Return the address formatted as a STOMP producer destination.
- *
- * @returns {string} The STOMP address.
- */
-ESPSDK.Address.prototype.getStompProducerDestination = function () {
- // Note that the destination is MQTT format, not ESP format
- if (this.isTopic) {
- return '/topic/' + this.destination;
- }
- return '/queue/' + this.destination;
-}
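-
-/**
- * Illustrative sketch of the address mapping above. The destination strings and the
- * instance id are arbitrary example values, not addresses defined by the platform.
- */
-function addressExample() {
- const topic = new ESPSDK.Address('lc.example.weather', true);
- // '/topic/LC.EXAMPLE.WEATHER' - producers publish directly to the topic
- console.log(topic.getStompProducerDestination());
- // '/queue/ESP.abc-123.LC.EXAMPLE.WEATHER' - each consumer instance gets its own queue
- console.log(topic.getStompConsumerDestination('abc-123'));
- const queue = new ESPSDK.Address('lc.example.weather.cmd', false);
- // '/queue/LC.EXAMPLE.WEATHER.CMD' - queue addresses map one-to-one
- console.log(queue.getStompProducerDestination());
-}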
-
-/**
- * Create a new message consumer.
- */
-ESPSDK.Consumer = function (espClient, subscribeCall) {
- this.espClient = espClient;
- this.subscribeCall = subscribeCall;
-}
-
-/**
- * Close the consumer and free any associated resources.
- */
-ESPSDK.Consumer.prototype.close = function () {
- this.sub.unsubscribe();
- this.espClient.consumers.delete(this);
-}
-
-/**
- * Private function. Do not call this function.
- *
- * @constructor
- */
-ESPSDK.Producer = function (espClient, destination) {
- this.espClient = espClient;
- this.destination = destination;
-}
-
-/**
- * Close the producer and free any associated resources.
- */
-ESPSDK.Producer.prototype.close = function () {
- // NOP
-}
-
-/**
- * Send a message for which a response is expected. When the reply is received, the returned
- * Promise is resolved with it. If no reply arrives before the timeout elapses, the Promise is
- * resolved with null. Only one reply may be received for any one request.
- *
- * @param msg The message to send.
- * @param timeoutMs How long to wait for a reply, in milliseconds (default 60000).
- */
-ESPSDK.Producer.prototype.sendCommand = function (msg, timeoutMs = 60000) {
- return new Promise((resolve, reject) => {
- const correlator = this.espClient.getCorrelator();
- const correlationId = uuidv4();
- const replyTo = correlator.getDestination();
- this.send(msg, correlationId, replyTo);
- correlator.gc();
- // Note that we don't resolve - the correlator will do that later when a response is received
- correlator.register(correlationId, function (msg) {
- // console.log('Got reply! Resolving sendCommand() promise.');
- // TODO: Handle null case here?
- resolve(msg);
- }, timeoutMs);
- });
-}
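-
-/**
- * Illustrative sketch of the request/reply pattern above. Assumes an already connected
- * ESPSDK.Client `esp`; the command address and payload are hypothetical examples. With
- * the correlator behaviour in this file, a timed-out request resolves with null.
- */
-function sendCommandExample(esp) {
- const producer = esp.createProducer(new ESPSDK.Address('lc.example.weather.cmd', false));
- return producer.sendCommand({'_type': 'lc.eo.example.v1.ping'}, 5000)
- .then(function (reply) {
- if (reply == null) {
- console.log('No reply before the timeout elapsed');
- } else {
- console.log('Reply: ' + JSON.stringify(reply));
- }
- producer.close();
- return reply;
- });
-}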
-
-/**
- * Send a message on the producer. A correlation ID and reply-to address may optionally be submitted.
- *
- * @param msg The message to send.
- * @param correlationId Optionally specify a correlation ID.
- * @param replyTo Optionally specify a reply-to ESP address.
- */
-ESPSDK.Producer.prototype.send = function (msg, correlationId, replyTo) {
- let stompMsg = {};
- let headers = null;
-
- stompMsg.destination = this.destination.getStompProducerDestination();
-
- stompMsg.body = JSON.stringify(msg);
-
- if (correlationId != null) {
- headers = {};
- headers[ESPSDK.PARAM_CORRELATION_ID] = correlationId;
- }
- if (replyTo != null) {
- if (headers == null) headers = {};
- headers[ESPSDK.PARAM_REPLY_TO] = replyTo;
- }
-
- if (headers != null) {
- stompMsg.headers = headers;
- }
-
- this.espClient.getStompClient().publish(stompMsg);
-}
-
-/**
- * This class provides a correlation entry for the Correlator class.
- *
- * @param callback The callback to later call.
- * @param timeoutMs How long to wait for a reply in milliseconds.
- */
-ESPSDK.Correlation = function (callback, timeoutMs) {
- this.expireTime = Date.now() + timeoutMs;
- this.callback = callback;
-}
-
-/**
- * Determine if the correlation is expired.
- *
- * @param time
- * @returns {boolean}
- */
-ESPSDK.Correlation.prototype.isExpired = function (time) {
- return time >= this.expireTime;
-}
-
-ESPSDK.Correlation.prototype.getCallback = function () {
- return this.callback;
-}
-
-/**
- * This class handles correlation of replies and pending event handlers for replies that may be asynchronously
- * received in the future.
- *
- * @param espClient The ESP client to correlate for.
- * @constructor
- */
-ESPSDK.Correlator = function (espClient) {
- // TODO: Allow prefix to be specified
- this.destination = 'lc.global.client.' + espClient.getInstanceId() + '.cmd';
- this.pending = new Map();
- this.consumer = null;
- this.espClient = espClient;
- // We pre-set this so we don't uselessly run early
- this.lastGc = Date.now();
- this.isGc = false;
-}
-
-/**
- * Garbage collect the correlator. This will remove any expired registrations from the pending list and call
- * the associated user callbacks.
- */
-ESPSDK.Correlator.prototype.gc = function () {
- this.isGc = true;
- const now = Date.now();
-
- if (now < this.lastGc + 1000) {
- // console.log('gc timeout not yet reached');
- this.isGc = false;
- return;
- }
- this.lastGc = now;
-
- const toRemove = new Set();
-
- this.pending.forEach((value, key) => {
- if (value.isExpired(now)) toRemove.add(key);
- });
-
- toRemove.forEach((cid) => {
- // This wholly implies the correlation is valid
- const p = this.pending.get(cid).getCallback();
-
- // But we do allow null callbacks
- if (p != null) {
- p(null);
- } else {
- console.log('WARNING: Reply promise was null: ' + cid);
- }
-
- this.pending.delete(cid);
- })
-
- this.isGc = false;
-}
-
-/**
- * Create the correlator consumer in the ESP client.
- */
-ESPSDK.Correlator.prototype.createConsumer = function () {
- const correlator = this;
- this.consumer = this.espClient.createConsumer(new ESPSDK.Address(this.destination),
- function (msg, correlationId) {
- // TODO: We need to be able to get the correlation-id here!
- // console.log('REPLY: ' + correlationId + ' => ' + msg);
- let correlation = correlator.pending.get(correlationId);
-
- if (correlation == null) {
- console.log('WARNING: Received reply but no correlation registered: ' + correlationId);
- return;
- }
- correlator.pending.delete(correlationId);
- // console.log('Calling correlated callback...');
- correlation.getCallback()(msg);
- });
- this.gc();
-}
-
-/**
- * Close the correlator and release any associated resources.
- */
-ESPSDK.Correlator.prototype.close = function () {
- // TODO: Deal with the correlator
- if (this.consumer != null) this.consumer.close();
-}
-
-/**
- * Return the destination for this correlator in bare format.
- *
- * @returns {any} The destination.
- */
-ESPSDK.Correlator.prototype.getDestination = function () {
- return this.destination;
-}
-
-/**
- * Register a new correlator for future processing. When a reply is received the callback will be called
- * with the message. If the timeout elapses before a message is received the callback will be called with
- * a null argument.
- *
- * @param id The correlation ID.
- * @param callback The callback when a reply is received or the timeout occurs.
- * @param timeoutMs The timeout in milliseconds.
- */
-ESPSDK.Correlator.prototype.register = function (id, callback, timeoutMs = 60000) {
- this.pending.set(id, new ESPSDK.Correlation(callback, timeoutMs));
- this.gc();
-}
-
-/**
- * An Enhanced Services Platform client. This client implementation provides support for ESP mk18 and connects
- * to the ESP engine service via STOMP over WebSocket. Multiple ESP clients can be present in a web application
- * at any one time.
- *
- * The URL for LC production is 'ws://esp1.leigh-co.com:52021/ws'.
- *
- * After the client is created, it must be started by calling the connect() method.
- *
- * @constructor Create a new Client.
- */
-ESPSDK.Client = function (engineURL) {
- this.instanceId = uuidv4();
- this.consumers = new Set();
- this.stompClient = new Client({
- brokerURL: engineURL,
- debug: function (str) {
- console.log(str);
- },
- reconnectDelay: 5000,
- heartbeatIncoming: 4000,
- heartbeatOutgoing: 4000,
- });
- this.hasConnected = false;
- this.correlator = new ESPSDK.Correlator(this);
-}
-
-/**
- * Return the Correlator associated with this client.
- *
- * @returns {*} The correlator.
- */
-ESPSDK.Client.prototype.getCorrelator = function () {
- return this.correlator;
-}
-
-/**
- * Start the connection to ESP. When the connection is established the promise is fulfilled with the newly created
- * client.
- */
-ESPSDK.Client.prototype.connect = function () {
- return new Promise((resolve) => {
- const espClient = this;
- this.stompClient.onConnect = function () {
- // Subscribe to any consumers we have
-
- // Note that creating the consumers is actually asynchronous, so here lurks a race condition in the case
- // of bare topics, or virtual topics for the time gap between first-registration of the virtual topic.
- // In normal operation this should not be an issue.
-
- espClient.consumers.forEach((value) => {
- value.subscribeCall();
- });
-
- espClient.getCorrelator().createConsumer();
-
- if (!espClient.hasConnected) {
- console.log('ESP has connected: ' + resolve);
- espClient.hasConnected = true;
- resolve(espClient);
- }
- }
- this.stompClient.activate();
- });
-}
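-
-/**
- * Illustrative sketch: create a client and wait for the connection to be established.
- * The URL shown is the LC production example from the Client documentation above;
- * substitute the engine URL for your own environment.
- */
-function connectExample() {
- const esp = new ESPSDK.Client('ws://esp1.leigh-co.com:52021/ws');
- return esp.connect().then(function (client) {
- console.log('Connected with instance id ' + client.getInstanceId());
- return client;
- });
-}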
-
-/**
- * Stop the connection to ESP. This asynchronously disconnects any underlying network connections.
- */
-ESPSDK.Client.prototype.close = function () {
- this.stompClient.deactivate();
-}
-
-/**
- * Return the STOMP client associated with this client.
- *
- * @returns {Client} The STOMP client.
- */
-ESPSDK.Client.prototype.getStompClient = function () {
- return this.stompClient;
-}
-
-/**
- * Return the instance ID for this client. A new instance ID is automatically generated each time an Client
- * is created and is unchanging for the life of the client.
- *
- * @returns {*} The instance ID.
- */
-ESPSDK.Client.prototype.getInstanceId = function () {
- return this.instanceId;
-}
-
-/**
- * Create a new consumer for the specified address.
- *
- * Multiple consumers for the same address on the same client should not be created. If multiple consumers for the
- * same address are created undefined results will occur.
- *
- * @param address The Address for the consumer.
- * @param callback The message callback. This function will be called every time a message is received on the
- * consumer, with the parsed message body and the correlation ID header (if any).
- * @returns {ESPSDK.Consumer} The new consumer.
- */
-ESPSDK.Client.prototype.createConsumer = function (address, callback) {
- const esp = this;
- const consumer = new ESPSDK.Consumer(this, function () {
- // Note subtly that the subscription may actually not be ready by the time we return, but that is OK.
- consumer.sub = esp.stompClient.subscribe(address.getStompConsumerDestination(esp.getInstanceId()),
- function (msg) {
- // console.log("Received bare message: " + msg);
- callback(JSON.parse(msg.body), msg.headers[ESPSDK.PARAM_CORRELATION_ID]);
- });
- });
- this.consumers.add(consumer);
- consumer.subscribeCall();
- return consumer;
-}
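-
-/**
- * Illustrative sketch: subscribe to a topic and log every message received. Assumes a
- * connected client `esp`; the topic name is a hypothetical example.
- */
-function consumerExample(esp) {
- const consumer = esp.createConsumer(new ESPSDK.Address('lc.example.weather', true),
- function (msg, correlationId) {
- console.log('Received ' + JSON.stringify(msg) + ' (correlation: ' + correlationId + ')');
- });
- // Call consumer.close() when the subscription is no longer needed.
- return consumer;
-}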
-
-/**
- * Create a new producer for the specified destination.
- *
- * Multiple producers for the same address on the same client should not be created. If multiple producers
- * for the same address are created undefined results will occur.
- *
- * @param address The Address for the producer.
- * @returns {ESPSDK.Producer}
- */
-ESPSDK.Client.prototype.createProducer = function (address) {
- return new ESPSDK.Producer(this, address);
-}
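-
-/**
- * Illustrative sketch: fire-and-forget publishing, optionally tagging a message with an
- * explicit correlation id and reply-to address. Assumes a connected client `esp`; the
- * address and payloads are hypothetical examples.
- */
-function producerExample(esp) {
- const producer = esp.createProducer(new ESPSDK.Address('lc.example.weather', true));
- // Plain publish - no reply expected.
- producer.send({'_type': 'lc.eo.example.v1.report', 'temperature': 21});
- // Publish with correlation headers; a reply would be delivered to the given ESP address.
- producer.send({'_type': 'lc.eo.example.v1.report', 'temperature': 22},
- uuidv4(), 'lc.global.client.' + esp.getInstanceId() + '.cmd');
- producer.close();
-}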
-
-/**
- * This class processes an ESP graph stream. It maintains a local copy of the graph and applies the received
- * delta messages from the backend service.
- *
- * @constructor
- */
-ESPSDK.GraphStream = function () {
- this.graph = null;
- this.txid = null;
- this.graphIndex = new Map();
- this.changeListener = null;
-}
-
-/**
- * Return the current transaction-id for this graph.
- */
-ESPSDK.GraphStream.prototype.getTxId = function () {
- return this.txid;
-}
-
-/**
- * Set the change listener for this graph stream. Only one listener can be set at a time. The listener will be
- * called once for every mutation, and the mutation delta will be passed into the function.
- *
- * @param callback The callback function.
- */
-ESPSDK.GraphStream.prototype.setChangeListener = function (callback) {
- this.changeListener = callback;
-}
-
-/**
- * Process a message from the stream.
- *
- * This function will queue deltas until a state response is received. The initial graph is established from
- * that state response, and once it is loaded, then any pending changes are applied in the order
- * in which they were received.
- *
- * @param msg The message to handle.
- * @param callback Called once the initial graph has been established from the state response.
- */
-ESPSDK.GraphStream.prototype.process = function (msg, callback) {
- console.log("Received message: " + JSON.stringify(msg));
- if (this.graph == null) {
- this.graph = lc.esp.v18.Graph_state_responseElementDAO.getGraph(msg)[0];
- this.txid = lc.esp.v18.Graph_state_responseElementDAO.getTxid(msg);
- console.log('Received graph: ' + this.txid + ' ' + this.graph);
- this._index(this.graph);
- console.log('Built initial graph index: ' + this.graphIndex.size);
- callback();
- } else {
- // console.log('Received delta: ' + JSON.stringify(msg));
- const deltaTxid = lc.esp.v18.Graph_txnElementDAO.getTxid(msg);
- if (deltaTxid <= this.txid) {
- console.log('Received redundant delta.');
- return;
- }
-
- if (deltaTxid !== this.txid + 1) {
- console.log('WARNING: Unexpected txid in stream. This/Delta: ' + this.txid + ' ' + deltaTxid);
- }
-
- const changes = lc.esp.v18.Graph_txnElementDAO.getChangeset(msg);
- const gs = this;
- changes.forEach(function (change) {
- gs.processDelta(change);
- });
-
- console.log('Updated graph: ' + JSON.stringify(this.graph));
-
- this.txid = deltaTxid;
- }
-}
-
-/**
- * Process an individual change. This will apply the given mutation to the graph.
- *
- * @param change The change EO to process.
- */
-ESPSDK.GraphStream.prototype.processDelta = function (change) {
- // console.log('Processing delta: ' + JSON.stringify(change));
-
- switch (change['_type']) {
- case lc.eo.changestream.v1.Attribute_setElementDAO.eoType: {
- // For an object we already have, set an attribute.
- let id = lc.eo.changestream.v1.Attribute_setElementDAO.getId(change);
- let obj = this.graphIndex.get(id);
- if (obj == null) {
- console.log('WARNING: Could not locate object for change: ' + JSON.stringify(change));
- return;
- }
- let key = lc.eo.changestream.v1.Attribute_setElementDAO.getKey(change);
- let value = lc.eo.changestream.v1.Attribute_setElementDAO.getValue(change);
- obj[key] = value;
- }
- break;
- case lc.eo.changestream.v1.Object_addElementDAO.eoType: {
- let id = lc.eo.changestream.v1.Object_addElementDAO.getId(change);
- let type = lc.eo.changestream.v1.Object_addElementDAO.getType(change);
- let obj = {};
-
- obj[ESPSDK.KEY_EO_ID] = id;
- obj[ESPSDK.KEY_EO_TYPE] = type;
-
- this.graphIndex.set(id, obj);
- }
- break;
- case lc.eo.changestream.v1.Loop_addElementDAO.eoType: {
- let parentId = lc.eo.changestream.v1.Loop_addElementDAO.getId(change);
- let parentObj = this.graphIndex.get(parentId);
- if (parentObj == null) {
- throw 'Could not locate object for change: ' + change;
- }
-
- let childId = lc.eo.changestream.v1.Loop_addElementDAO.getChildId(change);
- let childObj = this.graphIndex.get(childId);
- if (childObj == null) {
- throw 'Could not locate object for change: ' + change;
- }
-
- let key = lc.eo.changestream.v1.Loop_addElementDAO.getKey(change);
-
- if (!parentObj.hasOwnProperty(key)) {
- parentObj[key] = [];
- }
- parentObj[key].push(childObj);
-
- }
- break;
- case lc.eo.changestream.v1.Loop_removeElementDAO.eoType: {
- let id = lc.eo.changestream.v1.Loop_removeElementDAO.getId(change);
-
- let obj = this.graphIndex.get(id);
- if (obj == null) {
- throw 'Could not locate object for change: ' + change;
- }
-
- let key = lc.eo.changestream.v1.Loop_removeElementDAO.getKey(change);
- let idx = lc.eo.changestream.v1.Loop_removeElementDAO.getIndex(change);
- let loop = obj[key];
- if (loop == null) {
- throw 'Loop unexpectedly empty during change mutation';
- }
-
- console.log('Located loop for remove: ' + JSON.stringify(loop));
- loop.splice(idx, 1);
- }
- break;
-
- default:
- console.log('WARNING: Received unknown delta: ' + JSON.stringify(change));
- }
- if (this.changeListener != null) this.changeListener(change);
-}
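-
-/**
- * Illustrative sketch of the change EOs handled above, built with the generated DAO
- * helpers defined later in this file. The id, type and value are hypothetical examples.
- */
-function deltaExample(graphStream) {
- const add = lc.eo.changestream.v1.Object_addElementDAO.create();
- lc.eo.changestream.v1.Object_addElementDAO.setId(add, '00000000-0000-0000-0000-000000000001');
- add[lc.eo.changestream.v1.Object_addElementDAO.KEY_TYPE] = 'lc.eo.example.v1.sensor';
- graphStream.processDelta(add);
-
- const set = lc.eo.changestream.v1.Attribute_setElementDAO.create();
- lc.eo.changestream.v1.Attribute_setElementDAO.setId(set, '00000000-0000-0000-0000-000000000001');
- lc.eo.changestream.v1.Attribute_setElementDAO.setKey(set, 'temperature');
- lc.eo.changestream.v1.Attribute_setElementDAO.setValue(set, '21');
- graphStream.processDelta(set);
-}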
-
-/**
- * Private function - do not call.
- */
-ESPSDK.GraphStream.prototype._index = function (eo) {
- const id = eo[ESPSDK.KEY_EO_ID];
-
- if (id == null) {
- console.log('ERROR: Failed to index EO which does not contain an id: ' + JSON.stringify(eo));
- return;
- }
-
- this.graphIndex.set(id, eo);
-
- for (const [key, value] of Object.entries(eo)) {
- if (Array.isArray(value)) {
- value.forEach((child) => {
- this._index(child);
- });
- }
- }
-}
-
-/**
- * This class performs the necessary low-level processing to attach to an ESP stream. Note that this class
- * does not provide any processing of the contents of the messages whatsoever.
- *
- * First, a consumer is created for the provided streamAddress, and messages begin to be received. Messages
- * that are received at this stage are queued.
- *
- * Second, the provided request is sent to the provided cmdAddress in order to perform a sync operation.
- *
- * Third, when the sync command returns a response, that response is sent to the provided callback.
- *
- * Fourth, all pending messages that had previously been queued are replayed in the order they were received
- * to the provided callback.
- *
- * From this point forward, received messages on the stream address are immediately sent to the provided callback.
- *
- * This implementation has a major limitation that only one stream may be monitored per address.
- *
- * @param esp The ESP client.
- * @param streamAddress The address to receive stream updates.
- * @param cmdAddress The address to send commands to.
- * @param request The request message to send to the backend.
- * @param callback The callback handler when messages are received.
- */
-ESPSDK.Stream = function (esp, streamAddress, cmdAddress, request, callback) {
- this.pending = new Set();
- this.isSynced = false;
- this.callback = callback;
- const s = this;
-
- this.consumer = esp.createConsumer(streamAddress, function (msg) {
- if (s.isSynced === false) {
- s.pending.add(msg);
- } else {
- s.callback(msg);
- }
- });
-
- this.producer = esp.createProducer(cmdAddress);
-
- this.producer.sendCommand(request, 5000).then(function (msg) {
- s.callback(msg);
- s.isSynced = true;
- s.pending.forEach(function (v) {
- s.callback(v);
- });
- s.pending = null;
- });
-}
-
-/**
- * Close the ESP stream and release any associated resources.
- */
-ESPSDK.Stream.prototype.close = function () {
- this.producer.close();
- this.consumer.close();
-}
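-
-/**
- * Illustrative sketch: wiring a GraphStream to a Stream so that the state response and
- * every later delta flow through GraphStream.process(). Assumes a connected client `esp`;
- * the stream/command addresses and the request payload are hypothetical examples.
- */
-function graphStreamExample(esp) {
- const graphStream = new ESPSDK.GraphStream();
- graphStream.setChangeListener(function (change) {
- console.log('Graph changed: ' + JSON.stringify(change));
- });
- const stream = new ESPSDK.Stream(esp,
- new ESPSDK.Address('lc.example.graph', true),
- new ESPSDK.Address('lc.example.graph.cmd', false),
- {'_type': 'lc.eo.example.v1.graph_state_request'},
- function (msg) {
- graphStream.process(msg, function () {
- console.log('Initial graph loaded, txid ' + graphStream.getTxId());
- });
- });
- return {graphStream: graphStream, stream: stream};
-}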
-
-/******************************************************************************************************************/
-
-const lc = {};
-ESPSDK.lc = lc;
-lc.eo = {};
-lc.eo.changestream = {};
-lc.eo.changestream.v1 = {};
-lc.eo.changestream.v1.Change_setElementDAO = {};
-
-lc.eo.changestream.v1.Change_setElementDAO.eoType = "lc.eo.changestream.v1.change_set";
-
-lc.eo.changestream.v1.Change_setElementDAO.KEY_CHANGES = "changes";
-
-lc.eo.changestream.v1.Change_setElementDAO.create = function () {
- return {'_type': 'lc.eo.changestream.v1.change_set'};
-};
-
-lc.eo.changestream.v1.Change_setElementDAO.assertType = function (eo) {
- return lc.eo.changestream.v1.Change_setElementDAO.eoType === eo['_type'];
-};
-
-
-/* EO{typeName='lc.eo.schema.Attribute', id=9ec3d43d-9c35-42e0-9b03-e1c9989d8542, data={eoType=changes, label=EOLoop{array=[EO{typeName='lc.eo.schema.Text', id=4225b071-28fe-43d7-ab13-8f7f5e9dfb43, data={text=changes, locale=en}, meta=null}]} , type=loop}, meta=null} */
-
-
-lc.eo.changestream.v1.Change_setElementDAO.getChanges = function (eo) {
- if (!lc.eo.changestream.v1.Change_setElementDAO.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + lc.eo.changestream.v1.Change_setElementDAO.eoType + "]";
- return eo[(lc.eo.changestream.v1.Change_setElementDAO.KEY_CHANGES)];
-
-};
-
-lc.eo.changestream.v1.Change_setElementDAO.setChanges = function (eo, value) {
- if (!lc.eo.changestream.v1.Change_setElementDAO.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + lc.eo.changestream.v1.Change_setElementDAO.eoType + "]";
- eo[lc.eo.changestream.v1.Change_setElementDAO.KEY_CHANGES] = value;
-};
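-
-/**
- * Illustrative sketch of the generated DAO pattern used throughout the rest of this file:
- * create an EO of the declared type, then use the typed setters and getters. The changes
- * array content is a hypothetical example.
- */
-function changeSetExample() {
- const cs = lc.eo.changestream.v1.Change_setElementDAO.create();
- lc.eo.changestream.v1.Change_setElementDAO.setChanges(cs, []);
- return lc.eo.changestream.v1.Change_setElementDAO.getChanges(cs);
-}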
-
-lc.eo.changestream.v1.Loop_setElementDAO = {};
-
-lc.eo.changestream.v1.Loop_setElementDAO.eoType = "lc.eo.changestream.v1.loop_set";
-
-lc.eo.changestream.v1.Loop_setElementDAO.KEY_ID = "id";
-lc.eo.changestream.v1.Loop_setElementDAO.KEY_KEY = "key";
-lc.eo.changestream.v1.Loop_setElementDAO.KEY_INDEX = "index";
-lc.eo.changestream.v1.Loop_setElementDAO.KEY_CHILDID = "childId";
-
-lc.eo.changestream.v1.Loop_setElementDAO.create = function () {
- return {'_type': 'lc.eo.changestream.v1.loop_set'};
-};
-
-lc.eo.changestream.v1.Loop_setElementDAO.assertType = function (eo) {
- return lc.eo.changestream.v1.Loop_setElementDAO.eoType === eo['_type'];
-};
-
-
-/* EO{typeName='lc.eo.schema.Attribute', id=4ff74153-4085-48ff-bf44-446e767b032b, data={eoType=id, label=EOLoop{array=[EO{typeName='lc.eo.schema.Text', id=ac21b155-c22b-42af-a982-0887bc44273a, data={text=id, locale=en}, meta=null}]} , type=uuid}, meta=null} */
-
-
-lc.eo.changestream.v1.Loop_setElementDAO.getId = function (eo) {
- if (!lc.eo.changestream.v1.Loop_setElementDAO.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + lc.eo.changestream.v1.Loop_setElementDAO.eoType + "]";
- return eo[(lc.eo.changestream.v1.Loop_setElementDAO.KEY_ID)];
-
-};
-
-lc.eo.changestream.v1.Loop_setElementDAO.setId = function (eo, value) {
- if (!lc.eo.changestream.v1.Loop_setElementDAO.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + lc.eo.changestream.v1.Loop_setElementDAO.eoType + "]";
- eo[lc.eo.changestream.v1.Loop_setElementDAO.KEY_ID] = value;
-};
-
-
-/* EO{typeName='lc.eo.schema.Attribute', id=6741fb7a-1d96-4984-a0b2-97b3346533c9, data={eoType=key, label=EOLoop{array=[EO{typeName='lc.eo.schema.Text', id=2f4e12dd-27d9-45e4-a66b-1853deb08e63, data={text=key, locale=en}, meta=null}]} , type=string}, meta=null} */
-
-
-lc.eo.changestream.v1.Loop_setElementDAO.getKey = function (eo) {
- if (!lc.eo.changestream.v1.Loop_setElementDAO.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + lc.eo.changestream.v1.Loop_setElementDAO.eoType + "]";
- return eo[(lc.eo.changestream.v1.Loop_setElementDAO.KEY_KEY)];
-
-};
-
-lc.eo.changestream.v1.Loop_setElementDAO.setKey = function (eo, value) {
- if (!lc.eo.changestream.v1.Loop_setElementDAO.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + lc.eo.changestream.v1.Loop_setElementDAO.eoType + "]";
- eo[lc.eo.changestream.v1.Loop_setElementDAO.KEY_KEY] = value;
-};
-
-
-/* EO{typeName='lc.eo.schema.Attribute', id=da07d883-e376-4768-bf87-264fcab6354e, data={eoType=index, label=EOLoop{array=[EO{typeName='lc.eo.schema.Text', id=4b44e047-f64e-42b6-adec-03e1abdafebe, data={text=index, locale=en}, meta=null}]} , type=number}, meta=null} */
-
-
-lc.eo.changestream.v1.Loop_setElementDAO.getIndex = function (eo) {
- if (!lc.eo.changestream.v1.Loop_setElementDAO.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + lc.eo.changestream.v1.Loop_setElementDAO.eoType + "]";
- return eo[(lc.eo.changestream.v1.Loop_setElementDAO.KEY_INDEX)];
-
-};
-
-lc.eo.changestream.v1.Loop_setElementDAO.setIndex = function (eo, value) {
- if (!lc.eo.changestream.v1.Loop_setElementDAO.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + lc.eo.changestream.v1.Loop_setElementDAO.eoType + "]";
- eo[lc.eo.changestream.v1.Loop_setElementDAO.KEY_INDEX] = value;
-};
-
-
-/* EO{typeName='lc.eo.schema.Attribute', id=2365ae19-1874-428c-acba-e41bb41a6675, data={eoType=childId, label=EOLoop{array=[EO{typeName='lc.eo.schema.Text', id=c60848ad-d58e-4ffe-a824-896ec8036cec, data={text=Child EO ID, locale=en}, meta=null}]} , type=uuid}, meta=null} */
-
-
-lc.eo.changestream.v1.Loop_setElementDAO.getChildId = function (eo) {
- if (!lc.eo.changestream.v1.Loop_setElementDAO.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + lc.eo.changestream.v1.Loop_setElementDAO.eoType + "]";
- return eo[(lc.eo.changestream.v1.Loop_setElementDAO.KEY_CHILDID)];
-
-};
-
-lc.eo.changestream.v1.Loop_setElementDAO.setChildId = function (eo, value) {
- if (!lc.eo.changestream.v1.Loop_setElementDAO.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + lc.eo.changestream.v1.Loop_setElementDAO.eoType + "]";
- eo[lc.eo.changestream.v1.Loop_setElementDAO.KEY_CHILDID] = value;
-};
-
-lc.eo.changestream.v1.Loop_removeElementDAO = {};
-
-lc.eo.changestream.v1.Loop_removeElementDAO.eoType = "lc.eo.changestream.v1.loop_remove";
-
-lc.eo.changestream.v1.Loop_removeElementDAO.KEY_ID = "id";
-lc.eo.changestream.v1.Loop_removeElementDAO.KEY_KEY = "key";
-lc.eo.changestream.v1.Loop_removeElementDAO.KEY_INDEX = "index";
-
-lc.eo.changestream.v1.Loop_removeElementDAO.create = function () {
- return {'_type': 'lc.eo.changestream.v1.loop_remove'};
-};
-
-lc.eo.changestream.v1.Loop_removeElementDAO.assertType = function (eo) {
- return lc.eo.changestream.v1.Loop_removeElementDAO.eoType === eo['_type'];
-};
-
-
-/* EO{typeName='lc.eo.schema.Attribute', id=1533eb7f-d4e8-4c2b-ae9b-260cf55de222, data={eoType=id, label=EOLoop{array=[EO{typeName='lc.eo.schema.Text', id=4e560415-1d5f-4d85-a2f2-78762e2e1c09, data={text=id, locale=en}, meta=null}]} , type=uuid}, meta=null} */
-
-
-lc.eo.changestream.v1.Loop_removeElementDAO.getId = function (eo) {
- if (!lc.eo.changestream.v1.Loop_removeElementDAO.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + lc.eo.changestream.v1.Loop_removeElementDAO.eoType + "]";
- return eo[(lc.eo.changestream.v1.Loop_removeElementDAO.KEY_ID)];
-
-};
-
-lc.eo.changestream.v1.Loop_removeElementDAO.setId = function (eo, value) {
- if (!lc.eo.changestream.v1.Loop_removeElementDAO.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + lc.eo.changestream.v1.Loop_removeElementDAO.eoType + "]";
- eo[lc.eo.changestream.v1.Loop_removeElementDAO.KEY_ID] = value;
-};
-
-
-/* EO{typeName='lc.eo.schema.Attribute', id=3278dad2-8ee1-4403-8c6c-bd7437ca2ef2, data={eoType=key, label=EOLoop{array=[EO{typeName='lc.eo.schema.Text', id=47ce4b59-ad93-47bd-bf17-8a7bbc0ee7e9, data={text=key, locale=en}, meta=null}]} , type=string}, meta=null} */
-
-
-lc.eo.changestream.v1.Loop_removeElementDAO.getKey = function (eo) {
- if (!lc.eo.changestream.v1.Loop_removeElementDAO.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + lc.eo.changestream.v1.Loop_removeElementDAO.eoType + "]";
- return eo[(lc.eo.changestream.v1.Loop_removeElementDAO.KEY_KEY)];
-
-};
-
-lc.eo.changestream.v1.Loop_removeElementDAO.setKey = function (eo, value) {
- if (!lc.eo.changestream.v1.Loop_removeElementDAO.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + lc.eo.changestream.v1.Loop_removeElementDAO.eoType + "]";
- eo[lc.eo.changestream.v1.Loop_removeElementDAO.KEY_KEY] = value;
-};
-
-
-/* EO{typeName='lc.eo.schema.Attribute', id=4ebb782c-b20d-4d5c-b1e8-a2cb1d937305, data={eoType=index, label=EOLoop{array=[EO{typeName='lc.eo.schema.Text', id=8e832adb-70ed-4b7e-a49a-d91ea40388aa, data={text=Index, locale=en}, meta=null}]} , type=number}, meta=null} */
-
-
-lc.eo.changestream.v1.Loop_removeElementDAO.getIndex = function (eo) {
- if (!lc.eo.changestream.v1.Loop_removeElementDAO.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + lc.eo.changestream.v1.Loop_removeElementDAO.eoType + "]";
- return eo[(lc.eo.changestream.v1.Loop_removeElementDAO.KEY_INDEX)];
-
-};
-
-lc.eo.changestream.v1.Loop_removeElementDAO.setIndex = function (eo, value) {
- if (!lc.eo.changestream.v1.Loop_removeElementDAO.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + lc.eo.changestream.v1.Loop_removeElementDAO.eoType + "]";
- eo[lc.eo.changestream.v1.Loop_removeElementDAO.KEY_INDEX] = value;
-};
-
-lc.eo.changestream.v1.Loop_addElementDAO = {};
-
-lc.eo.changestream.v1.Loop_addElementDAO.eoType = "lc.eo.changestream.v1.loop_add";
-
-lc.eo.changestream.v1.Loop_addElementDAO.KEY_ID = "id";
-lc.eo.changestream.v1.Loop_addElementDAO.KEY_KEY = "key";
-lc.eo.changestream.v1.Loop_addElementDAO.KEY_CHILDID = "childId";
-
-lc.eo.changestream.v1.Loop_addElementDAO.create = function () {
- return {'_type': 'lc.eo.changestream.v1.loop_add'};
-};
-
-lc.eo.changestream.v1.Loop_addElementDAO.assertType = function (eo) {
- return lc.eo.changestream.v1.Loop_addElementDAO.eoType === eo['_type'];
-};
-
-
-/* EO{typeName='lc.eo.schema.Attribute', id=0711935e-9525-412d-8d0f-d16da9115399, data={eoType=id, label=EOLoop{array=[EO{typeName='lc.eo.schema.Text', id=399293ae-6729-4c25-ab05-50a4b7417d4f, data={text=id, locale=en}, meta=null}]} , type=uuid}, meta=null} */
-
-
-lc.eo.changestream.v1.Loop_addElementDAO.getId = function (eo) {
- if (!lc.eo.changestream.v1.Loop_addElementDAO.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + lc.eo.changestream.v1.Loop_addElementDAO.eoType + "]";
- return eo[(lc.eo.changestream.v1.Loop_addElementDAO.KEY_ID)];
-
-};
-
-lc.eo.changestream.v1.Loop_addElementDAO.setId = function (eo, value) {
- if (!lc.eo.changestream.v1.Loop_addElementDAO.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + lc.eo.changestream.v1.Loop_addElementDAO.eoType + "]";
- eo[lc.eo.changestream.v1.Loop_addElementDAO.KEY_ID] = value;
-};
-
-
-/* EO{typeName='lc.eo.schema.Attribute', id=ba2c14fe-d602-4d48-a508-8b3affb69bc5, data={eoType=key, label=EOLoop{array=[EO{typeName='lc.eo.schema.Text', id=09a08417-5846-475c-869c-aaef3081b774, data={text=key, locale=en}, meta=null}]} , type=string}, meta=null} */
-
-
-lc.eo.changestream.v1.Loop_addElementDAO.getKey = function (eo) {
- if (!lc.eo.changestream.v1.Loop_addElementDAO.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + lc.eo.changestream.v1.Loop_addElementDAO.eoType + "]";
- return eo[(lc.eo.changestream.v1.Loop_addElementDAO.KEY_KEY)];
-
-};
-
-lc.eo.changestream.v1.Loop_addElementDAO.setKey = function (eo, value) {
- if (!lc.eo.changestream.v1.Loop_addElementDAO.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + lc.eo.changestream.v1.Loop_addElementDAO.eoType + "]";
- eo[lc.eo.changestream.v1.Loop_addElementDAO.KEY_KEY] = value;
-};
-
-
-/* EO{typeName='lc.eo.schema.Attribute', id=60fa1731-2e33-4d16-8d57-d7c26b544f8b, data={eoType=childId, label=EOLoop{array=[EO{typeName='lc.eo.schema.Text', id=78c6e2a3-608a-4353-a12e-4d111d3bd691, data={text=Child EO ID, locale=en}, meta=null}]} , type=uuid}, meta=null} */
-
-
-lc.eo.changestream.v1.Loop_addElementDAO.getChildId = function (eo) {
- if (!lc.eo.changestream.v1.Loop_addElementDAO.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + lc.eo.changestream.v1.Loop_addElementDAO.eoType + "]";
- return eo[(lc.eo.changestream.v1.Loop_addElementDAO.KEY_CHILDID)];
-
-};
-
-lc.eo.changestream.v1.Loop_addElementDAO.setChildId = function (eo, value) {
- if (!lc.eo.changestream.v1.Loop_addElementDAO.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + lc.eo.changestream.v1.Loop_addElementDAO.eoType + "]";
- eo[lc.eo.changestream.v1.Loop_addElementDAO.KEY_CHILDID] = value;
-};
-
-lc.eo.changestream.v1.Attribute_deleteElementDAO = {};
-
-lc.eo.changestream.v1.Attribute_deleteElementDAO.eoType = "lc.eo.changestream.v1.attribute_delete";
-
-lc.eo.changestream.v1.Attribute_deleteElementDAO.KEY_ID = "id";
-lc.eo.changestream.v1.Attribute_deleteElementDAO.KEY_KEY = "key";
-
-lc.eo.changestream.v1.Attribute_deleteElementDAO.create = function () {
- return {'_type': 'lc.eo.changestream.v1.attribute_delete'};
-};
-
-lc.eo.changestream.v1.Attribute_deleteElementDAO.assertType = function (eo) {
- return lc.eo.changestream.v1.Attribute_deleteElementDAO.eoType === eo['_type'];
-};
-
-
-/* EO{typeName='lc.eo.schema.Attribute', id=6e9a936f-a99f-4e31-ac4e-e4a2880dc555, data={eoType=id, label=EOLoop{array=[EO{typeName='lc.eo.schema.Text', id=16804526-4aee-4162-8548-4a92db0ba357, data={text=id, locale=en}, meta=null}]} , type=uuid}, meta=null} */
-
-
-lc.eo.changestream.v1.Attribute_deleteElementDAO.getId = function (eo) {
- if (!lc.eo.changestream.v1.Attribute_deleteElementDAO.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + lc.eo.changestream.v1.Attribute_deleteElementDAO.eoType + "]";
- return eo[(lc.eo.changestream.v1.Attribute_deleteElementDAO.KEY_ID)];
-
-};
-
-lc.eo.changestream.v1.Attribute_deleteElementDAO.setId = function (eo, value) {
- if (!lc.eo.changestream.v1.Attribute_deleteElementDAO.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + lc.eo.changestream.v1.Attribute_deleteElementDAO.eoType + "]";
- eo[lc.eo.changestream.v1.Attribute_deleteElementDAO.KEY_ID] = value;
-};
-
-
-/* EO{typeName='lc.eo.schema.Attribute', id=5da04b24-c7d8-40c5-8fe6-93a1ba6721d2, data={eoType=key, label=EOLoop{array=[EO{typeName='lc.eo.schema.Text', id=e64330d3-312c-45d4-b1e1-9e871b7126de, data={text=key, locale=en}, meta=null}]} , type=string}, meta=null} */
-
-
-lc.eo.changestream.v1.Attribute_deleteElementDAO.getKey = function (eo) {
- if (!lc.eo.changestream.v1.Attribute_deleteElementDAO.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + lc.eo.changestream.v1.Attribute_deleteElementDAO.eoType + "]";
- return eo[(lc.eo.changestream.v1.Attribute_deleteElementDAO.KEY_KEY)];
-
-};
-
-lc.eo.changestream.v1.Attribute_deleteElementDAO.setKey = function (eo, value) {
- if (!lc.eo.changestream.v1.Attribute_deleteElementDAO.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + lc.eo.changestream.v1.Attribute_deleteElementDAO.eoType + "]";
- eo[lc.eo.changestream.v1.Attribute_deleteElementDAO.KEY_KEY] = value;
-};
-
-lc.eo.changestream.v1.Attribute_setElementDAO = {};
-
-lc.eo.changestream.v1.Attribute_setElementDAO.eoType = "lc.eo.changestream.v1.attribute_set";
-
-lc.eo.changestream.v1.Attribute_setElementDAO.KEY_ID = "id";
-lc.eo.changestream.v1.Attribute_setElementDAO.KEY_KEY = "key";
-lc.eo.changestream.v1.Attribute_setElementDAO.KEY_VALUE = "value";
-
-lc.eo.changestream.v1.Attribute_setElementDAO.create = function () {
- return {'_type': 'lc.eo.changestream.v1.attribute_set'};
-};
-
-lc.eo.changestream.v1.Attribute_setElementDAO.assertType = function (eo) {
- return lc.eo.changestream.v1.Attribute_setElementDAO.eoType === eo['_type'];
-};
-
-
-/* EO{typeName='lc.eo.schema.Attribute', id=335f4bf5-e278-464b-933e-f4c1a5730c2a, data={eoType=id, label=EOLoop{array=[EO{typeName='lc.eo.schema.Text', id=12df1cd2-3bf6-4c44-9d9d-6cda4c17ab48, data={text=id, locale=en}, meta=null}]} , type=uuid}, meta=null} */
-
-
-lc.eo.changestream.v1.Attribute_setElementDAO.getId = function (eo) {
- if (!lc.eo.changestream.v1.Attribute_setElementDAO.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + lc.eo.changestream.v1.Attribute_setElementDAO.eoType + "]";
- return eo[(lc.eo.changestream.v1.Attribute_setElementDAO.KEY_ID)];
-
-};
-
-lc.eo.changestream.v1.Attribute_setElementDAO.setId = function (eo, value) {
- if (!lc.eo.changestream.v1.Attribute_setElementDAO.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + lc.eo.changestream.v1.Attribute_setElementDAO.eoType + "]";
- eo[lc.eo.changestream.v1.Attribute_setElementDAO.KEY_ID] = value;
-};
-
-
-/* EO{typeName='lc.eo.schema.Attribute', id=8b915420-e117-4a81-ae56-df23b0e0a295, data={eoType=key, label=EOLoop{array=[EO{typeName='lc.eo.schema.Text', id=723f290e-8dda-4eb1-b746-a126ce7e0f6f, data={text=key, locale=en}, meta=null}]} , type=string}, meta=null} */
-
-
-lc.eo.changestream.v1.Attribute_setElementDAO.getKey = function (eo) {
- if (!lc.eo.changestream.v1.Attribute_setElementDAO.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + lc.eo.changestream.v1.Attribute_setElementDAO.eoType + "]";
- return eo[(lc.eo.changestream.v1.Attribute_setElementDAO.KEY_KEY)];
-
-};
-
-lc.eo.changestream.v1.Attribute_setElementDAO.setKey = function (eo, value) {
- if (!lc.eo.changestream.v1.Attribute_setElementDAO.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + lc.eo.changestream.v1.Attribute_setElementDAO.eoType + "]";
- eo[lc.eo.changestream.v1.Attribute_setElementDAO.KEY_KEY] = value;
-};
-
-
-/* EO{typeName='lc.eo.schema.Attribute', id=b8c223a1-289a-4263-8ec2-28c7d1d562d8, data={eoType=value, label=EOLoop{array=[EO{typeName='lc.eo.schema.Text', id=f8d99001-5d8c-43d9-a7d1-80d4a9751055, data={text=value, locale=en}, meta=null}]} , type=string}, meta=null} */
-
-
-lc.eo.changestream.v1.Attribute_setElementDAO.getValue = function (eo) {
- if (!lc.eo.changestream.v1.Attribute_setElementDAO.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + lc.eo.changestream.v1.Attribute_setElementDAO.eoType + "]";
- return eo[(lc.eo.changestream.v1.Attribute_setElementDAO.KEY_VALUE)];
-
-};
-
-lc.eo.changestream.v1.Attribute_setElementDAO.setValue = function (eo, value) {
- if (!lc.eo.changestream.v1.Attribute_setElementDAO.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + lc.eo.changestream.v1.Attribute_setElementDAO.eoType + "]";
- eo[lc.eo.changestream.v1.Attribute_setElementDAO.KEY_VALUE] = value;
-};
-
-lc.eo.changestream.v1.Object_removeElementDAO = {};
-
-lc.eo.changestream.v1.Object_removeElementDAO.eoType = "lc.eo.changestream.v1.object_remove";
-
-lc.eo.changestream.v1.Object_removeElementDAO.KEY_ID = "id";
-
-lc.eo.changestream.v1.Object_removeElementDAO.create = function () {
- return {'_type': 'lc.eo.changestream.v1.object_remove'};
-};
-
-lc.eo.changestream.v1.Object_removeElementDAO.assertType = function (eo) {
- return lc.eo.changestream.v1.Object_removeElementDAO.eoType === eo['_type'];
-};
-
-
-/* EO{typeName='lc.eo.schema.Attribute', id=46d4084e-92a2-4fd0-bc2c-94876ee5dfb7, data={eoType=id, label=EOLoop{array=[EO{typeName='lc.eo.schema.Text', id=834d08f4-19ee-441b-ac8b-e42e94dc9254, data={text=id, locale=en}, meta=null}]} , type=uuid}, meta=null} */
-
-
-lc.eo.changestream.v1.Object_removeElementDAO.getId = function (eo) {
- if (!lc.eo.changestream.v1.Object_removeElementDAO.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + lc.eo.changestream.v1.Object_removeElementDAO.eoType + "]";
- return eo[(lc.eo.changestream.v1.Object_removeElementDAO.KEY_ID)];
-
-};
-
-lc.eo.changestream.v1.Object_removeElementDAO.setId = function (eo, value) {
- if (!lc.eo.changestream.v1.Object_removeElementDAO.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + lc.eo.changestream.v1.Object_removeElementDAO.eoType + "]";
- eo[lc.eo.changestream.v1.Object_removeElementDAO.KEY_ID] = value;
-};
-
-lc.eo.changestream.v1.Object_addElementDAO = {};
-
-lc.eo.changestream.v1.Object_addElementDAO.eoType = "lc.eo.changestream.v1.object_add";
-
-lc.eo.changestream.v1.Object_addElementDAO.KEY_ID = "id";
-lc.eo.changestream.v1.Object_addElementDAO.KEY_TYPE = "type";
-
-lc.eo.changestream.v1.Object_addElementDAO.create = function () {
- return {'_type': 'lc.eo.changestream.v1.object_add'};
-};
-
-lc.eo.changestream.v1.Object_addElementDAO.assertType = function (eo) {
- return lc.eo.changestream.v1.Object_addElementDAO.eoType === eo['_type'];
-};
-
-
-/* EO{typeName='lc.eo.schema.Attribute', id=eac10692-b4c1-4b64-8a9c-38af2b60c3cf, data={eoType=id, label=EOLoop{array=[EO{typeName='lc.eo.schema.Text', id=46c85401-794a-434c-b6ec-ed2e6fe5a704, data={text=id, locale=en}, meta=null}]} , type=uuid}, meta=null} */
-
-
-lc.eo.changestream.v1.Object_addElementDAO.getId = function (eo) {
- if (!lc.eo.changestream.v1.Object_addElementDAO.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + lc.eo.changestream.v1.Object_addElementDAO.eoType + "]";
- return eo[(lc.eo.changestream.v1.Object_addElementDAO.KEY_ID)];
-
-};
-
-lc.eo.changestream.v1.Object_addElementDAO.setId = function (eo, value) {
- if (!lc.eo.changestream.v1.Object_addElementDAO.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + lc.eo.changestream.v1.Object_addElementDAO.eoType + "]";
- eo[lc.eo.changestream.v1.Object_addElementDAO.KEY_ID] = value;
-};
-
-
-/* EO{typeName='lc.eo.schema.Attribute', id=06ee16ca-749f-4cca-9a83-0d962aa94641, data={eoType=type, label=EOLoop{array=[EO{typeName='lc.eo.schema.Text', id=1013612d-bf3b-44d7-880d-1ebd5268f4fc, data={text=Type, locale=en}, meta=null}]} , type=string}, meta=null} */
-
-
-lc.eo.changestream.v1.Object_addElementDAO.getType = function (eo) {
- if (!lc.eo.changestream.v1.Object_addElementDAO.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + lc.eo.changestream.v1.Object_addElementDAO.eoType + "]";
- return eo[(lc.eo.changestream.v1.Object_addElementDAO.KEY_TYPE)];
-
-};
-
-lc.eo.changestream.v1.Object_addElementDAO.setType = function (eo, value) {
- if (!lc.eo.changestream.v1.Object_addElementDAO.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + lc.eo.changestream.v1.Object_addElementDAO.eoType + "]";
- eo[lc.eo.changestream.v1.Object_addElementDAO.KEY_TYPE] = value;
-};
-
-
-/********************************************************************************************************************/
-lc.esp = {};
-lc.esp.test = {};
-lc.esp.test.v1 = {};
-lc.esp.test.v1.BeaconElementDAO = {};
-
-lc.esp.test.v1.BeaconElementDAO.eoType = "lc.esp.test.v1.beacon";
-
-lc.esp.test.v1.BeaconElementDAO.KEY_ID = "id";
-lc.esp.test.v1.BeaconElementDAO.KEY_TIME = "time";
-
-lc.esp.test.v1.BeaconElementDAO.create = function () {
- return {'_type': 'lc.esp.test.v1.beacon'};
-};
-
-lc.esp.test.v1.BeaconElementDAO.assertType = function (eo) {
- return this.eoType === eo['_type'];
-};
-
-
-/* EO{typeName='lc.eo.schema.Attribute', data={eoType=id, label=EOLoop{array=[EO{typeName='lc.eo.schema.Text', data={text=id, locale=en}, meta=null}]} , type=uuid}, meta=null} */
-
-
-lc.esp.test.v1.BeaconElementDAO.getId = function (eo) {
- if (!this.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + this.eoType + "]";
- return eo[(this.KEY_ID)];
-
-};
-
-lc.esp.test.v1.BeaconElementDAO.setId = function (eo, value) {
- if (!this.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + this.eoType + "]";
- eo[this.KEY_ID] = value;
-};
-
-
-/* EO{typeName='lc.eo.schema.Attribute', data={eoType=time, label=EOLoop{array=[EO{typeName='lc.eo.schema.Text', data={text=time, locale=en}, meta=null}]} , type=instant}, meta=null} */
-
-
-lc.esp.test.v1.BeaconElementDAO.getTime = function (eo) {
- if (!this.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + this.eoType + "]";
- return eo[(this.KEY_TIME)];
-
-};
-
-lc.esp.test.v1.BeaconElementDAO.setTime = function (eo, value) {
- if (!this.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + this.eoType + "]";
- eo[this.KEY_TIME] = value;
-};
-
-lc.esp.test.v1.Greeting_requestElementDAO = {};
-
-lc.esp.test.v1.Greeting_requestElementDAO.eoType = "lc.esp.test.v1.greeting_request";
-
-lc.esp.test.v1.Greeting_requestElementDAO.KEY_NAME = "name";
-
-lc.esp.test.v1.Greeting_requestElementDAO.create = function () {
- return {'_type': 'lc.esp.test.v1.greeting_request'};
-};
-
-lc.esp.test.v1.Greeting_requestElementDAO.assertType = function (eo) {
- return this.eoType === eo['_type'];
-};
-
-
-/* EO{typeName='lc.eo.schema.Attribute', data={eoType=name, label=EOLoop{array=[EO{typeName='lc.eo.schema.Text', data={text=Name, locale=en}, meta=null}]} , type=string}, meta=null} */
-
-
-lc.esp.test.v1.Greeting_requestElementDAO.getName = function (eo) {
- if (!this.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + this.eoType + "]";
- return eo[(this.KEY_NAME)];
-
-};
-
-lc.esp.test.v1.Greeting_requestElementDAO.setName = function (eo, value) {
- if (!this.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + this.eoType + "]";
- eo[this.KEY_NAME] = value;
-};
-
-lc.esp.test.v1.Greeting_responseElementDAO = {};
-
-lc.esp.test.v1.Greeting_responseElementDAO.eoType = "lc.esp.test.v1.greeting_response";
-
-lc.esp.test.v1.Greeting_responseElementDAO.KEY_MESSAGE = "message";
-
-lc.esp.test.v1.Greeting_responseElementDAO.create = function () {
- return {'_type': 'lc.esp.test.v1.greeting_response'};
-};
-
-lc.esp.test.v1.Greeting_responseElementDAO.assertType = function (eo) {
- return this.eoType === eo['_type'];
-};
-
-
-/* EO{typeName='lc.eo.schema.Attribute', data={eoType=message, label=EOLoop{array=[EO{typeName='lc.eo.schema.Text', data={text=Message, locale=en}, meta=null}]} , type=string}, meta=null} */
-
-
-lc.esp.test.v1.Greeting_responseElementDAO.getMessage = function (eo) {
- if (!this.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + this.eoType + "]";
- return eo[(this.KEY_MESSAGE)];
-
-};
-
-lc.esp.test.v1.Greeting_responseElementDAO.setMessage = function (eo, value) {
- if (!this.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + this.eoType + "]";
- eo[this.KEY_MESSAGE] = value;
-};
-
-
-/*********************************************************************************************************************/
-
-lc.esp = lc.esp || {};
-lc.esp.v18 = {};
-lc.esp.v18.Graph_state_requestElementDAO = {};
-
-lc.esp.v18.Graph_state_requestElementDAO.eoType = "lc.esp.v18.graph_state_request";
-
-
-lc.esp.v18.Graph_state_requestElementDAO.create = function () {
- return {'_type': 'lc.esp.v18.graph_state_request'};
-};
-
-lc.esp.v18.Graph_state_requestElementDAO.assertType = function (eo) {
- return lc.esp.v18.Graph_state_requestElementDAO.eoType === eo['_type'];
-};
-
-lc.esp.v18.Graph_state_responseElementDAO = {};
-
-lc.esp.v18.Graph_state_responseElementDAO.eoType = "lc.esp.v18.graph_state_response";
-
-lc.esp.v18.Graph_state_responseElementDAO.KEY_TXID = "txid";
-lc.esp.v18.Graph_state_responseElementDAO.KEY_GRAPH = "graph";
-
-lc.esp.v18.Graph_state_responseElementDAO.create = function () {
- return {'_type': 'lc.esp.v18.graph_state_response'};
-};
-
-lc.esp.v18.Graph_state_responseElementDAO.assertType = function (eo) {
- return lc.esp.v18.Graph_state_responseElementDAO.eoType === eo['_type'];
-};
-
-
-/* EO{typeName='lc.eo.schema.Attribute', id=6902986e-ba71-46ac-8976-c59c468fbae5, data={eoType=txid, label=EOLoop{array=[EO{typeName='lc.eo.schema.Text', id=f63a51b4-bb39-451c-8eb6-eb26fa42d0f9, data={text=Transaction ID, locale=en}, meta=null}]} , type=uuid}, meta=null} */
-
-
-lc.esp.v18.Graph_state_responseElementDAO.getTxid = function (eo) {
- if (!lc.esp.v18.Graph_state_responseElementDAO.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + lc.esp.v18.Graph_state_responseElementDAO.eoType + "]";
- return eo[(lc.esp.v18.Graph_state_responseElementDAO.KEY_TXID)];
-
-};
-
-lc.esp.v18.Graph_state_responseElementDAO.setTxid = function (eo, value) {
- if (!lc.esp.v18.Graph_state_responseElementDAO.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + lc.esp.v18.Graph_state_responseElementDAO.eoType + "]";
- eo[lc.esp.v18.Graph_state_responseElementDAO.KEY_TXID] = value;
-};
-
-
-/* EO{typeName='lc.eo.schema.Attribute', id=de23a3c9-8235-457d-8e80-50412da00b07, data={eoType=graph, label=EOLoop{array=[EO{typeName='lc.eo.schema.Text', id=d89d097d-8ef5-453c-9090-9ac8f0c14bba, data={text=Graph, locale=en}, meta=null}]} , type=loop}, meta=null} */
-
-
-lc.esp.v18.Graph_state_responseElementDAO.getGraph = function (eo) {
- if (!lc.esp.v18.Graph_state_responseElementDAO.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + lc.esp.v18.Graph_state_responseElementDAO.eoType + "]";
- return eo[(lc.esp.v18.Graph_state_responseElementDAO.KEY_GRAPH)];
-
-};
-
-lc.esp.v18.Graph_state_responseElementDAO.setGraph = function (eo, value) {
- if (!lc.esp.v18.Graph_state_responseElementDAO.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + lc.esp.v18.Graph_state_responseElementDAO.eoType + "]";
- eo[lc.esp.v18.Graph_state_responseElementDAO.KEY_GRAPH] = value;
-};
-
-lc.esp.v18.Graph_txnElementDAO = {};
-
-lc.esp.v18.Graph_txnElementDAO.eoType = "lc.esp.v18.graph_txn";
-
-lc.esp.v18.Graph_txnElementDAO.KEY_TXID = "txid";
-lc.esp.v18.Graph_txnElementDAO.KEY_CHANGESET = "changeset";
-
-lc.esp.v18.Graph_txnElementDAO.create = function () {
- return {'_type': 'lc.esp.v18.graph_txn'};
-};
-
-lc.esp.v18.Graph_txnElementDAO.assertType = function (eo) {
- return lc.esp.v18.Graph_txnElementDAO.eoType === eo['_type'];
-};
-
-
-/* EO{typeName='lc.eo.schema.Attribute', id=858ecbd1-a42b-49ed-93d9-ffd0f66127af, data={eoType=txid, label=EOLoop{array=[EO{typeName='lc.eo.schema.Text', id=74b8973c-a1b6-4621-8101-1c02367ddce2, data={text=Transaction ID, locale=en}, meta=null}]} , type=uuid}, meta=null} */
-
-
-lc.esp.v18.Graph_txnElementDAO.getTxid = function (eo) {
- if (!lc.esp.v18.Graph_txnElementDAO.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + lc.esp.v18.Graph_txnElementDAO.eoType + "]";
- return eo[(lc.esp.v18.Graph_txnElementDAO.KEY_TXID)];
-
-};
-
-lc.esp.v18.Graph_txnElementDAO.setTxid = function (eo, value) {
- if (!lc.esp.v18.Graph_txnElementDAO.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + lc.esp.v18.Graph_txnElementDAO.eoType + "]";
- eo[lc.esp.v18.Graph_txnElementDAO.KEY_TXID] = value;
-};
-
-
-/* EO{typeName='lc.eo.schema.Attribute', id=d45bb887-03e0-4a9d-b004-cf4fd20e2809, data={eoType=changeset, label=EOLoop{array=[EO{typeName='lc.eo.schema.Text', id=89659871-892d-4a51-8e13-f19e1d696002, data={text=Graph Deltas, locale=en}, meta=null}]} , type=loop}, meta=null} */
-
-
-lc.esp.v18.Graph_txnElementDAO.getChangeset = function (eo) {
- if (!lc.esp.v18.Graph_txnElementDAO.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + lc.esp.v18.Graph_txnElementDAO.eoType + "]";
- return eo[(lc.esp.v18.Graph_txnElementDAO.KEY_CHANGESET)];
-
-};
-
-lc.esp.v18.Graph_txnElementDAO.setChangeset = function (eo, value) {
- if (!lc.esp.v18.Graph_txnElementDAO.assertType(eo)) throw "Mismatched EO type: [found: " + eo['eoType'] + "] [expected: " + lc.esp.v18.Graph_txnElementDAO.eoType + "]";
- eo[lc.esp.v18.Graph_txnElementDAO.KEY_CHANGESET] = value;
-};
-
diff --git a/lc-esp-sdk-js/webpack.config.js b/lc-esp-sdk-js/webpack.config.js
deleted file mode 100644
index 00b8bf0ae82009cc789400333780de9872a48251..0000000000000000000000000000000000000000
--- a/lc-esp-sdk-js/webpack.config.js
+++ /dev/null
@@ -1,12 +0,0 @@
-const path = require('path');
-
-module.exports = {
- entry: './src/index.js',
- output: {
- filename: 'index.js',
- path: path.resolve(__dirname, 'dist'),
- library: 'ESPSDK',
- libraryTarget: 'window',
- libraryExport: 'ESPSDK'
- },
-};
diff --git a/lc-esp-sdk/LICENSE.md b/lc-esp-sdk/LICENSE.md
deleted file mode 100644
index a723b3d269ad04bc083c21a75c6af35f448a9b6a..0000000000000000000000000000000000000000
--- a/lc-esp-sdk/LICENSE.md
+++ /dev/null
@@ -1,636 +0,0 @@
-# GNU GENERAL PUBLIC LICENSE
-Version 3, 29 June 2007
-
-Copyright (C) 2007 [Free Software Foundation, Inc.](http://fsf.org/)
-
-Everyone is permitted to copy and distribute verbatim copies of this license
-document, but changing it is not allowed.
-
-## Preamble
-
-The GNU General Public License is a free, copyleft license for software and
-other kinds of works.
-
-The licenses for most software and other practical works are designed to take
-away your freedom to share and change the works. By contrast, the GNU General
-Public License is intended to guarantee your freedom to share and change all
-versions of a program--to make sure it remains free software for all its users.
-We, the Free Software Foundation, use the GNU General Public License for most
-of our software; it applies also to any other work released this way by its
-authors. You can apply it to your programs, too.
-
-When we speak of free software, we are referring to freedom, not price. Our
-General Public Licenses are designed to make sure that you have the freedom to
-distribute copies of free software (and charge for them if you wish), that you
-receive source code or can get it if you want it, that you can change the
-software or use pieces of it in new free programs, and that you know you can do
-these things.
-
-To protect your rights, we need to prevent others from denying you these rights
-or asking you to surrender the rights. Therefore, you have certain
-responsibilities if you distribute copies of the software, or if you modify it:
-responsibilities to respect the freedom of others.
-
-For example, if you distribute copies of such a program, whether gratis or for
-a fee, you must pass on to the recipients the same freedoms that you received.
-You must make sure that they, too, receive or can get the source code. And you
-must show them these terms so they know their rights.
-
-Developers that use the GNU GPL protect your rights with two steps:
-
-1. assert copyright on the software, and
-2. offer you this License giving you legal permission to copy, distribute
- and/or modify it.
-
-For the developers' and authors' protection, the GPL clearly explains that
-there is no warranty for this free software. For both users' and authors' sake,
-the GPL requires that modified versions be marked as changed, so that their
-problems will not be attributed erroneously to authors of previous versions.
-
-Some devices are designed to deny users access to install or run modified
-versions of the software inside them, although the manufacturer can do so. This
-is fundamentally incompatible with the aim of protecting users' freedom to
-change the software. The systematic pattern of such abuse occurs in the area of
-products for individuals to use, which is precisely where it is most
-unacceptable. Therefore, we have designed this version of the GPL to prohibit
-the practice for those products. If such problems arise substantially in other
-domains, we stand ready to extend this provision to those domains in future
-versions of the GPL, as needed to protect the freedom of users.
-
-Finally, every program is threatened constantly by software patents. States
-should not allow patents to restrict development and use of software on
-general-purpose computers, but in those that do, we wish to avoid the special
-danger that patents applied to a free program could make it effectively
-proprietary. To prevent this, the GPL assures that patents cannot be used to
-render the program non-free.
-
-The precise terms and conditions for copying, distribution and modification
-follow.
-
-## TERMS AND CONDITIONS
-
-### 0. Definitions.
-
-*This License* refers to version 3 of the GNU General Public License.
-
-*Copyright* also means copyright-like laws that apply to other kinds of works,
-such as semiconductor masks.
-
-*The Program* refers to any copyrightable work licensed under this License.
-Each licensee is addressed as *you*. *Licensees* and *recipients* may be
-individuals or organizations.
-
-To *modify* a work means to copy from or adapt all or part of the work in a
-fashion requiring copyright permission, other than the making of an exact copy.
-The resulting work is called a *modified version* of the earlier work or a work
-*based on* the earlier work.
-
-A *covered work* means either the unmodified Program or a work based on the
-Program.
-
-To *propagate* a work means to do anything with it that, without permission,
-would make you directly or secondarily liable for infringement under applicable
-copyright law, except executing it on a computer or modifying a private copy.
-Propagation includes copying, distribution (with or without modification),
-making available to the public, and in some countries other activities as well.
-
-To *convey* a work means any kind of propagation that enables other parties to
-make or receive copies. Mere interaction with a user through a computer
-network, with no transfer of a copy, is not conveying.
-
-An interactive user interface displays *Appropriate Legal Notices* to the
-extent that it includes a convenient and prominently visible feature that
-
-1. displays an appropriate copyright notice, and
-2. tells the user that there is no warranty for the work (except to the
- extent that warranties are provided), that licensees may convey the work
- under this License, and how to view a copy of this License.
-
-If the interface presents a list of user commands or options, such as a menu, a
-prominent item in the list meets this criterion.
-
-### 1. Source Code.
-
-The *source code* for a work means the preferred form of the work for making
-modifications to it. *Object code* means any non-source form of a work.
-
-A *Standard Interface* means an interface that either is an official standard
-defined by a recognized standards body, or, in the case of interfaces specified
-for a particular programming language, one that is widely used among developers
-working in that language.
-
-The *System Libraries* of an executable work include anything, other than the
-work as a whole, that (a) is included in the normal form of packaging a Major
-Component, but which is not part of that Major Component, and (b) serves only
-to enable use of the work with that Major Component, or to implement a Standard
-Interface for which an implementation is available to the public in source code
-form. A *Major Component*, in this context, means a major essential component
-(kernel, window system, and so on) of the specific operating system (if any) on
-which the executable work runs, or a compiler used to produce the work, or an
-object code interpreter used to run it.
-
-The *Corresponding Source* for a work in object code form means all the source
-code needed to generate, install, and (for an executable work) run the object
-code and to modify the work, including scripts to control those activities.
-However, it does not include the work's System Libraries, or general-purpose
-tools or generally available free programs which are used unmodified in
-performing those activities but which are not part of the work. For example,
-Corresponding Source includes interface definition files associated with source
-files for the work, and the source code for shared libraries and dynamically
-linked subprograms that the work is specifically designed to require, such as
-by intimate data communication or control flow between those subprograms and
-other parts of the work.
-
-The Corresponding Source need not include anything that users can regenerate
-automatically from other parts of the Corresponding Source.
-
-The Corresponding Source for a work in source code form is that same work.
-
-### 2. Basic Permissions.
-
-All rights granted under this License are granted for the term of copyright on
-the Program, and are irrevocable provided the stated conditions are met. This
-License explicitly affirms your unlimited permission to run the unmodified
-Program. The output from running a covered work is covered by this License only
-if the output, given its content, constitutes a covered work. This License
-acknowledges your rights of fair use or other equivalent, as provided by
-copyright law.
-
-You may make, run and propagate covered works that you do not convey, without
-conditions so long as your license otherwise remains in force. You may convey
-covered works to others for the sole purpose of having them make modifications
-exclusively for you, or provide you with facilities for running those works,
-provided that you comply with the terms of this License in conveying all
-material for which you do not control copyright. Those thus making or running
-the covered works for you must do so exclusively on your behalf, under your
-direction and control, on terms that prohibit them from making any copies of
-your copyrighted material outside their relationship with you.
-
-Conveying under any other circumstances is permitted solely under the
-conditions stated below. Sublicensing is not allowed; section 10 makes it
-unnecessary.
-
-### 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
-
-No covered work shall be deemed part of an effective technological measure
-under any applicable law fulfilling obligations under article 11 of the WIPO
-copyright treaty adopted on 20 December 1996, or similar laws prohibiting or
-restricting circumvention of such measures.
-
-When you convey a covered work, you waive any legal power to forbid
-circumvention of technological measures to the extent such circumvention is
-effected by exercising rights under this License with respect to the covered
-work, and you disclaim any intention to limit operation or modification of the
-work as a means of enforcing, against the work's users, your or third parties'
-legal rights to forbid circumvention of technological measures.
-
-### 4. Conveying Verbatim Copies.
-
-You may convey verbatim copies of the Program's source code as you receive it,
-in any medium, provided that you conspicuously and appropriately publish on
-each copy an appropriate copyright notice; keep intact all notices stating that
-this License and any non-permissive terms added in accord with section 7 apply
-to the code; keep intact all notices of the absence of any warranty; and give
-all recipients a copy of this License along with the Program.
-
-You may charge any price or no price for each copy that you convey, and you may
-offer support or warranty protection for a fee.
-
-### 5. Conveying Modified Source Versions.
-
-You may convey a work based on the Program, or the modifications to produce it
-from the Program, in the form of source code under the terms of section 4,
-provided that you also meet all of these conditions:
-
-- a) The work must carry prominent notices stating that you modified it, and
- giving a relevant date.
-- b) The work must carry prominent notices stating that it is released under
- this License and any conditions added under section 7. This requirement
- modifies the requirement in section 4 to *keep intact all notices*.
-- c) You must license the entire work, as a whole, under this License to
- anyone who comes into possession of a copy. This License will therefore
- apply, along with any applicable section 7 additional terms, to the whole
- of the work, and all its parts, regardless of how they are packaged. This
- License gives no permission to license the work in any other way, but it
- does not invalidate such permission if you have separately received it.
-- d) If the work has interactive user interfaces, each must display
- Appropriate Legal Notices; however, if the Program has interactive
- interfaces that do not display Appropriate Legal Notices, your work need
- not make them do so.
-
-A compilation of a covered work with other separate and independent works,
-which are not by their nature extensions of the covered work, and which are not
-combined with it such as to form a larger program, in or on a volume of a
-storage or distribution medium, is called an *aggregate* if the compilation and
-its resulting copyright are not used to limit the access or legal rights of the
-compilation's users beyond what the individual works permit. Inclusion of a
-covered work in an aggregate does not cause this License to apply to the other
-parts of the aggregate.
-
-### 6. Conveying Non-Source Forms.
-
-You may convey a covered work in object code form under the terms of sections 4
-and 5, provided that you also convey the machine-readable Corresponding Source
-under the terms of this License, in one of these ways:
-
-- a) Convey the object code in, or embodied in, a physical product (including
- a physical distribution medium), accompanied by the Corresponding Source
- fixed on a durable physical medium customarily used for software
- interchange.
-- b) Convey the object code in, or embodied in, a physical product (including
- a physical distribution medium), accompanied by a written offer, valid for
- at least three years and valid for as long as you offer spare parts or
- customer support for that product model, to give anyone who possesses the
- object code either
-1. a copy of the Corresponding Source for all the software in the product
- that is covered by this License, on a durable physical medium
- customarily used for software interchange, for a price no more than your
- reasonable cost of physically performing this conveying of source, or
-2. access to copy the Corresponding Source from a network server at no
- charge.
-- c) Convey individual copies of the object code with a copy of the written
- offer to provide the Corresponding Source. This alternative is allowed only
- occasionally and noncommercially, and only if you received the object code
- with such an offer, in accord with subsection 6b.
-- d) Convey the object code by offering access from a designated place
- (gratis or for a charge), and offer equivalent access to the Corresponding
- Source in the same way through the same place at no further charge. You
- need not require recipients to copy the Corresponding Source along with the
- object code. If the place to copy the object code is a network server, the
- Corresponding Source may be on a different server (operated by you or a
- third party) that supports equivalent copying facilities, provided you
- maintain clear directions next to the object code saying where to find the
- Corresponding Source. Regardless of what server hosts the Corresponding
- Source, you remain obligated to ensure that it is available for as long as
- needed to satisfy these requirements.
-- e) Convey the object code using peer-to-peer transmission, provided you
- inform other peers where the object code and Corresponding Source of the
- work are being offered to the general public at no charge under subsection
- 6d.
-
-A separable portion of the object code, whose source code is excluded from the
-Corresponding Source as a System Library, need not be included in conveying the
-object code work.
-
-A *User Product* is either
-
-1. a *consumer product*, which means any tangible personal property which is
- normally used for personal, family, or household purposes, or
-2. anything designed or sold for incorporation into a dwelling.
-
-In determining whether a product is a consumer product, doubtful cases shall be
-resolved in favor of coverage. For a particular product received by a
-particular user, *normally used* refers to a typical or common use of that
-class of product, regardless of the status of the particular user or of the way
-in which the particular user actually uses, or expects or is expected to use,
-the product. A product is a consumer product regardless of whether the product
-has substantial commercial, industrial or non-consumer uses, unless such uses
-represent the only significant mode of use of the product.
-
-*Installation Information* for a User Product means any methods, procedures,
-authorization keys, or other information required to install and execute
-modified versions of a covered work in that User Product from a modified
-version of its Corresponding Source. The information must suffice to ensure
-that the continued functioning of the modified object code is in no case
-prevented or interfered with solely because modification has been made.
-
-If you convey an object code work under this section in, or with, or
-specifically for use in, a User Product, and the conveying occurs as part of a
-transaction in which the right of possession and use of the User Product is
-transferred to the recipient in perpetuity or for a fixed term (regardless of
-how the transaction is characterized), the Corresponding Source conveyed under
-this section must be accompanied by the Installation Information. But this
-requirement does not apply if neither you nor any third party retains the
-ability to install modified object code on the User Product (for example, the
-work has been installed in ROM).
-
-The requirement to provide Installation Information does not include a
-requirement to continue to provide support service, warranty, or updates for a
-work that has been modified or installed by the recipient, or for the User
-Product in which it has been modified or installed. Access to a network may be
-denied when the modification itself materially and adversely affects the
-operation of the network or violates the rules and protocols for communication
-across the network.
-
-Corresponding Source conveyed, and Installation Information provided, in accord
-with this section must be in a format that is publicly documented (and with an
-implementation available to the public in source code form), and must require
-no special password or key for unpacking, reading or copying.
-
-### 7. Additional Terms.
-
-*Additional permissions* are terms that supplement the terms of this License by
-making exceptions from one or more of its conditions. Additional permissions
-that are applicable to the entire Program shall be treated as though they were
-included in this License, to the extent that they are valid under applicable
-law. If additional permissions apply only to part of the Program, that part may
-be used separately under those permissions, but the entire Program remains
-governed by this License without regard to the additional permissions.
-
-When you convey a copy of a covered work, you may at your option remove any
-additional permissions from that copy, or from any part of it. (Additional
-permissions may be written to require their own removal in certain cases when
-you modify the work.) You may place additional permissions on material, added
-by you to a covered work, for which you have or can give appropriate copyright
-permission.
-
-Notwithstanding any other provision of this License, for material you add to a
-covered work, you may (if authorized by the copyright holders of that material)
-supplement the terms of this License with terms:
-
-- a) Disclaiming warranty or limiting liability differently from the terms of
- sections 15 and 16 of this License; or
-- b) Requiring preservation of specified reasonable legal notices or author
- attributions in that material or in the Appropriate Legal Notices displayed
- by works containing it; or
-- c) Prohibiting misrepresentation of the origin of that material, or
- requiring that modified versions of such material be marked in reasonable
- ways as different from the original version; or
-- d) Limiting the use for publicity purposes of names of licensors or authors
- of the material; or
-- e) Declining to grant rights under trademark law for use of some trade
- names, trademarks, or service marks; or
-- f) Requiring indemnification of licensors and authors of that material by
- anyone who conveys the material (or modified versions of it) with
- contractual assumptions of liability to the recipient, for any liability
- that these contractual assumptions directly impose on those licensors and
- authors.
-
-All other non-permissive additional terms are considered *further restrictions*
-within the meaning of section 10. If the Program as you received it, or any
-part of it, contains a notice stating that it is governed by this License along
-with a term that is a further restriction, you may remove that term. If a
-license document contains a further restriction but permits relicensing or
-conveying under this License, you may add to a covered work material governed
-by the terms of that license document, provided that the further restriction
-does not survive such relicensing or conveying.
-
-If you add terms to a covered work in accord with this section, you must place,
-in the relevant source files, a statement of the additional terms that apply to
-those files, or a notice indicating where to find the applicable terms.
-
-Additional terms, permissive or non-permissive, may be stated in the form of a
-separately written license, or stated as exceptions; the above requirements
-apply either way.
-
-### 8. Termination.
-
-You may not propagate or modify a covered work except as expressly provided
-under this License. Any attempt otherwise to propagate or modify it is void,
-and will automatically terminate your rights under this License (including any
-patent licenses granted under the third paragraph of section 11).
-
-However, if you cease all violation of this License, then your license from a
-particular copyright holder is reinstated
-
-- a) provisionally, unless and until the copyright holder explicitly and
- finally terminates your license, and
-- b) permanently, if the copyright holder fails to notify you of the
- violation by some reasonable means prior to 60 days after the cessation.
-
-Moreover, your license from a particular copyright holder is reinstated
-permanently if the copyright holder notifies you of the violation by some
-reasonable means, this is the first time you have received notice of violation
-of this License (for any work) from that copyright holder, and you cure the
-violation prior to 30 days after your receipt of the notice.
-
-Termination of your rights under this section does not terminate the licenses
-of parties who have received copies or rights from you under this License. If
-your rights have been terminated and not permanently reinstated, you do not
-qualify to receive new licenses for the same material under section 10.
-
-### 9. Acceptance Not Required for Having Copies.
-
-You are not required to accept this License in order to receive or run a copy
-of the Program. Ancillary propagation of a covered work occurring solely as a
-consequence of using peer-to-peer transmission to receive a copy likewise does
-not require acceptance. However, nothing other than this License grants you
-permission to propagate or modify any covered work. These actions infringe
-copyright if you do not accept this License. Therefore, by modifying or
-propagating a covered work, you indicate your acceptance of this License to do
-so.
-
-### 10. Automatic Licensing of Downstream Recipients.
-
-Each time you convey a covered work, the recipient automatically receives a
-license from the original licensors, to run, modify and propagate that work,
-subject to this License. You are not responsible for enforcing compliance by
-third parties with this License.
-
-An *entity transaction* is a transaction transferring control of an
-organization, or substantially all assets of one, or subdividing an
-organization, or merging organizations. If propagation of a covered work
-results from an entity transaction, each party to that transaction who receives
-a copy of the work also receives whatever licenses to the work the party's
-predecessor in interest had or could give under the previous paragraph, plus a
-right to possession of the Corresponding Source of the work from the
-predecessor in interest, if the predecessor has it or can get it with
-reasonable efforts.
-
-You may not impose any further restrictions on the exercise of the rights
-granted or affirmed under this License. For example, you may not impose a
-license fee, royalty, or other charge for exercise of rights granted under this
-License, and you may not initiate litigation (including a cross-claim or
-counterclaim in a lawsuit) alleging that any patent claim is infringed by
-making, using, selling, offering for sale, or importing the Program or any
-portion of it.
-
-### 11. Patents.
-
-A *contributor* is a copyright holder who authorizes use under this License of
-the Program or a work on which the Program is based. The work thus licensed is
-called the contributor's *contributor version*.
-
-A contributor's *essential patent claims* are all patent claims owned or
-controlled by the contributor, whether already acquired or hereafter acquired,
-that would be infringed by some manner, permitted by this License, of making,
-using, or selling its contributor version, but do not include claims that would
-be infringed only as a consequence of further modification of the contributor
-version. For purposes of this definition, *control* includes the right to grant
-patent sublicenses in a manner consistent with the requirements of this
-License.
-
-Each contributor grants you a non-exclusive, worldwide, royalty-free patent
-license under the contributor's essential patent claims, to make, use, sell,
-offer for sale, import and otherwise run, modify and propagate the contents of
-its contributor version.
-
-In the following three paragraphs, a *patent license* is any express agreement
-or commitment, however denominated, not to enforce a patent (such as an express
-permission to practice a patent or covenant not to sue for patent
-infringement). To *grant* such a patent license to a party means to make such
-an agreement or commitment not to enforce a patent against the party.
-
-If you convey a covered work, knowingly relying on a patent license, and the
-Corresponding Source of the work is not available for anyone to copy, free of
-charge and under the terms of this License, through a publicly available
-network server or other readily accessible means, then you must either
-
-1. cause the Corresponding Source to be so available, or
-2. arrange to deprive yourself of the benefit of the patent license for this
- particular work, or
-3. arrange, in a manner consistent with the requirements of this License, to
- extend the patent license to downstream recipients.
-
-*Knowingly relying* means you have actual knowledge that, but for the patent
-license, your conveying the covered work in a country, or your recipient's use
-of the covered work in a country, would infringe one or more identifiable
-patents in that country that you have reason to believe are valid.
-
-If, pursuant to or in connection with a single transaction or arrangement, you
-convey, or propagate by procuring conveyance of, a covered work, and grant a
-patent license to some of the parties receiving the covered work authorizing
-them to use, propagate, modify or convey a specific copy of the covered work,
-then the patent license you grant is automatically extended to all recipients
-of the covered work and works based on it.
-
-A patent license is *discriminatory* if it does not include within the scope of
-its coverage, prohibits the exercise of, or is conditioned on the non-exercise
-of one or more of the rights that are specifically granted under this License.
-You may not convey a covered work if you are a party to an arrangement with a
-third party that is in the business of distributing software, under which you
-make payment to the third party based on the extent of your activity of
-conveying the work, and under which the third party grants, to any of the
-parties who would receive the covered work from you, a discriminatory patent
-license
-
-- a) in connection with copies of the covered work conveyed by you (or copies
- made from those copies), or
-- b) primarily for and in connection with specific products or compilations
- that contain the covered work, unless you entered into that arrangement, or
- that patent license was granted, prior to 28 March 2007.
-
-Nothing in this License shall be construed as excluding or limiting any implied
-license or other defenses to infringement that may otherwise be available to
-you under applicable patent law.
-
-### 12. No Surrender of Others' Freedom.
-
-If conditions are imposed on you (whether by court order, agreement or
-otherwise) that contradict the conditions of this License, they do not excuse
-you from the conditions of this License. If you cannot convey a covered work so
-as to satisfy simultaneously your obligations under this License and any other
-pertinent obligations, then as a consequence you may not convey it at all. For
-example, if you agree to terms that obligate you to collect a royalty for
-further conveying from those to whom you convey the Program, the only way you
-could satisfy both those terms and this License would be to refrain entirely
-from conveying the Program.
-
-### 13. Use with the GNU Affero General Public License.
-
-Notwithstanding any other provision of this License, you have permission to
-link or combine any covered work with a work licensed under version 3 of the
-GNU Affero General Public License into a single combined work, and to convey
-the resulting work. The terms of this License will continue to apply to the
-part which is the covered work, but the special requirements of the GNU Affero
-General Public License, section 13, concerning interaction through a network
-will apply to the combination as such.
-
-### 14. Revised Versions of this License.
-
-The Free Software Foundation may publish revised and/or new versions of the GNU
-General Public License from time to time. Such new versions will be similar in
-spirit to the present version, but may differ in detail to address new problems
-or concerns.
-
-Each version is given a distinguishing version number. If the Program specifies
-that a certain numbered version of the GNU General Public License *or any later
-version* applies to it, you have the option of following the terms and
-conditions either of that numbered version or of any later version published by
-the Free Software Foundation. If the Program does not specify a version number
-of the GNU General Public License, you may choose any version ever published by
-the Free Software Foundation.
-
-If the Program specifies that a proxy can decide which future versions of the
-GNU General Public License can be used, that proxy's public statement of
-acceptance of a version permanently authorizes you to choose that version for
-the Program.
-
-Later license versions may give you additional or different permissions.
-However, no additional obligations are imposed on any author or copyright
-holder as a result of your choosing to follow a later version.
-
-### 15. Disclaimer of Warranty.
-
-THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE
-LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER
-PARTIES PROVIDE THE PROGRAM *AS IS* WITHOUT WARRANTY OF ANY KIND, EITHER
-EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE
-QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE
-DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR
-CORRECTION.
-
-### 16. Limitation of Liability.
-
-IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY
-COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS
-PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL,
-INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE
-THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED
-INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE
-PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY
-HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
-
-### 17. Interpretation of Sections 15 and 16.
-
-If the disclaimer of warranty and limitation of liability provided above cannot
-be given local legal effect according to their terms, reviewing courts shall
-apply local law that most closely approximates an absolute waiver of all civil
-liability in connection with the Program, unless a warranty or assumption of
-liability accompanies a copy of the Program in return for a fee.
-
-## END OF TERMS AND CONDITIONS
-
-### How to Apply These Terms to Your New Programs
-
-If you develop a new program, and you want it to be of the greatest possible
-use to the public, the best way to achieve this is to make it free software
-which everyone can redistribute and change under these terms.
-
-To do so, attach the following notices to the program. It is safest to attach
-them to the start of each source file to most effectively state the exclusion
-of warranty; and each file should have at least the *copyright* line and a
-pointer to where the full notice is found.
-
-
- Copyright (C) <year> <name of author>
-
- This program is free software: you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation, either version 3 of the License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License
- along with this program. If not, see <http://www.gnu.org/licenses/>.
-
-Also add information on how to contact you by electronic and paper mail.
-
-If the program does terminal interaction, make it output a short notice like
-this when it starts in an interactive mode:
-
- <program> Copyright (C) <year> <name of author>
- This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
- This is free software, and you are welcome to redistribute it
- under certain conditions; type `show c' for details.
-
-The hypothetical commands `show w` and `show c` should show the appropriate
-parts of the General Public License. Of course, your program's commands might
-be different; for a GUI interface, you would use an *about box*.
-
-You should also get your employer (if you work as a programmer) or school, if
-any, to sign a *copyright disclaimer* for the program, if necessary. For more
-information on this, and how to apply and follow the GNU GPL, see
-[http://www.gnu.org/licenses/](http://www.gnu.org/licenses/).
-
-The GNU General Public License does not permit incorporating your program into
-proprietary programs. If your program is a subroutine library, you may consider
-it more useful to permit linking proprietary applications with the library. If
-this is what you want to do, use the GNU Lesser General Public License instead
-of this License. But first, please read
-[http://www.gnu.org/philosophy/why-not-lgpl.html](http://www.gnu.org/philosophy/why-not-lgpl.html).
diff --git a/lc-esp-sdk/build.gradle b/lc-esp-sdk/build.gradle
deleted file mode 100644
index e016fc30afa9d8754ffaeca4290d5ab0756d68ac..0000000000000000000000000000000000000000
--- a/lc-esp-sdk/build.gradle
+++ /dev/null
@@ -1,28 +0,0 @@
-plugins {
- id 'java'
- id 'java-library'
-}
-
-group 'leighco'
-version '18.1-SNAPSHOT'
-
-repositories {
- mavenCentral()
-}
-
-sourceCompatibility = JavaVersion.VERSION_11
-targetCompatibility = JavaVersion.VERSION_11
-
-dependencies {
- api project(':lc-zero-sdk')
- api project(':lc-eo-json')
- api project(':lc-eo-changestream')
- // https://mvnrepository.com/artifact/org.apache.activemq/activemq-client
- api group: 'org.apache.activemq', name: 'activemq-client', version: '5.17.1'
- testImplementation 'org.junit.jupiter:junit-jupiter-api:5.8.1'
- testRuntimeOnly 'org.junit.jupiter:junit-jupiter-engine:5.8.1'
-}
-
-test {
- useJUnitPlatform()
-}
\ No newline at end of file
diff --git a/lc-esp-sdk/src/main/java/lc/esp/sdk/AsyncSender.java b/lc-esp-sdk/src/main/java/lc/esp/sdk/AsyncSender.java
deleted file mode 100644
index 80c211db920dbc383e18f238e6cb9ecf4647095a..0000000000000000000000000000000000000000
--- a/lc-esp-sdk/src/main/java/lc/esp/sdk/AsyncSender.java
+++ /dev/null
@@ -1,36 +0,0 @@
-package lc.esp.sdk;
-
-import lc.mecha.util.BasicallyDangerous;
-
-import java.util.concurrent.LinkedBlockingQueue;
-
-/**
- * Implementation of an asynchronous sender for {@link ESPMessage} objects. This class
- * is useful when you are in a critical section of code (such as a game tick handler)
- * and want to send a message, but are unwilling to incur any delay.
- *
- * Messages are enqueued in a {@link LinkedBlockingQueue} and a dedicated thread transmits them at rate.
- *
- * @author Alex Leigh
- * @since mk18 (GIPSY DANGER)
- */
-public class AsyncSender extends BasicallyDangerous {
- private final ESPProducer producer;
- private final LinkedBlockingQueue<ESPMessage> q = new LinkedBlockingQueue<>();
-
- public AsyncSender(ESPProducer producer) {
- this.producer = producer;
- }
-
- public void send(ESPMessage msg) {
- q.add(msg);
- }
-
- @Override
- public void runDangerously() throws Exception {
- //noinspection InfiniteLoopStatement
- while (true) {
- producer.send(q.take());
- }
- }
-}
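The Javadoc above describes an enqueue-and-forward pattern: callers drop a message into a LinkedBlockingQueue, and a dedicated thread drains the queue and hands each message to the producer. Below is a minimal, self-contained sketch of the same pattern using only JDK types; the Sender interface and String payloads are stand-ins for the SDK's ESPProducer and ESPMessage, and since the way BasicallyDangerous schedules runDangerously() is not shown here, the sketch hosts the loop on an explicit daemon thread as an assumption.

```java
import java.util.concurrent.LinkedBlockingQueue;

// Minimal sketch of the enqueue-and-forward pattern described above.
// "Sender" and the String payload are stand-ins, not SDK types.
public class AsyncSenderSketch {

    interface Sender {
        void send(String message) throws Exception;
    }

    private final Sender sender;
    private final LinkedBlockingQueue<String> queue = new LinkedBlockingQueue<>();

    AsyncSenderSketch(Sender sender) {
        this.sender = sender;
    }

    // Called from latency-sensitive code; only enqueues, never performs I/O.
    void send(String message) {
        queue.add(message);
    }

    // Runs on a dedicated thread: drains the queue and transmits each message in order.
    void pump() throws Exception {
        while (true) {
            sender.send(queue.take());
        }
    }

    public static void main(String[] args) throws InterruptedException {
        AsyncSenderSketch async = new AsyncSenderSketch(msg -> System.out.println("sent: " + msg));

        Thread pumper = new Thread(() -> {
            try {
                async.pump();
            } catch (Exception e) {
                Thread.currentThread().interrupt();
            }
        });
        pumper.setDaemon(true);
        pumper.start();

        async.send("hello");   // returns immediately, even if the downstream transport is slow
        Thread.sleep(100);     // give the daemon pump thread a moment to print
    }
}
```

The point of the design is that the caller's cost is a single queue insertion; any broker latency is absorbed by the pump thread.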
diff --git a/lc-esp-sdk/src/main/java/lc/esp/sdk/ESPAddress.java b/lc-esp-sdk/src/main/java/lc/esp/sdk/ESPAddress.java
deleted file mode 100644
index 36301ac714debae564f8f2d1bdffe0c880459c75..0000000000000000000000000000000000000000
--- a/lc-esp-sdk/src/main/java/lc/esp/sdk/ESPAddress.java
+++ /dev/null
@@ -1,226 +0,0 @@
-package lc.esp.sdk;
-
-import com.anthonynsimon.url.URL;
-import lc.mecha.log.MechaLogger;
-import lc.mecha.log.MechaLoggerFactory;
-import lc.mecha.util.StringAccumulatorV2;
-import org.apache.activemq.command.ActiveMQQueue;
-import org.apache.activemq.command.ActiveMQTopic;
-
-import javax.jms.Destination;
-import javax.jms.IllegalStateException;
-import javax.jms.JMSException;
-import javax.jms.Session;
-import java.util.Locale;
-import java.util.UUID;
-
-/**
- * This class holds a destination address for {@link ESPMessage}s.
- *
- * @author Alex Leigh
- * @since mk18 (GIPSY DANGER)
- */
-public class ESPAddress {
- private static final MechaLogger logger = MechaLoggerFactory.getLogger(ESPAddress.class);
- public static final String VIRTUAL_TOPIC_PREFIX = "ESP";
- private final String org;
- private final String domain;
- private final String service;
- private final String name;
- private final ESPAddressClass addressClass;
- private final ESPMessageClass messageClass;
- private final String consumerId;
-
- public ESPAddress(Destination dst) throws JMSException {
- logger.trace("Got destination: {} {}", dst.getClass().toString(), dst.toString());
-
- if (dst instanceof ActiveMQQueue) {
- // If this is a queue, it is possible it's a virtual consumer. See: ATG-51
- ActiveMQQueue q = (ActiveMQQueue) dst;
- String[] path = q.getQueueName().split("\\.");
-
- if (path.length == 5) {
- this.org = path[0];
- this.domain = path[1];
- this.service = path[2];
- this.name = path[3];
- this.messageClass = ESPMessageClass.val(path[4]);
- this.addressClass = ESPAddressClass.QUEUE;
- this.consumerId = makeConsumerId();
- } else if (path.length == 7) {
- this.consumerId = path[1];
- this.org = path[2];
- this.domain = path[3];
- this.service = path[4];
- this.name = path[5];
- this.messageClass = ESPMessageClass.val(path[6]);
- this.addressClass = ESPAddressClass.QUEUE;
- } else {
- throw new IllegalStateException("Unknown address format: " + dst);
- }
- } else if (dst instanceof ActiveMQTopic) {
- ActiveMQTopic q = (ActiveMQTopic) dst;
- String[] path = q.getTopicName().split("\\.");
- this.org = path[0];
- this.domain = path[1];
- this.service = path[2];
- this.name = path[3];
- this.messageClass = ESPMessageClass.val(path[4]);
- this.addressClass = ESPAddressClass.TOPIC;
- this.consumerId = makeConsumerId();
- } else {
- throw new IllegalStateException("Unknown destination type.");
- }
- }
-
- private String makeConsumerId() {
- return UUID.randomUUID().toString();
- }
-
- public ESPAddress(String org, String domain, String service, String name,
- ESPAddressClass serviceClass, ESPMessageClass destClass) {
- this.org = org;
- this.domain = domain;
- this.service = service;
- this.name = name;
- this.addressClass = serviceClass;
- this.messageClass = destClass;
- this.consumerId = makeConsumerId();
- }
-
- /**
- * Parses an address given as a URL string, for example:
- *
- *     queue://org.domain.service.name.messageclass
- *     topic://consumerId@org.domain.service.name.messageclass
- *
- * The scheme selects the {@link ESPAddressClass}, the dot-separated host supplies the
- * five address components (org, domain, service, name, message class), and the optional
- * user-info portion becomes the consumer ID.
- */
- public ESPAddress(String address) throws Exception {
- URL addrUrl = URL.parse(address);
-
- String[] host = addrUrl.getHostname().split("\\.");
- this.addressClass = ESPAddressClass.val(addrUrl.getScheme());
- this.org = host[0];
- this.domain = host[1];
- this.service = host[2];
- this.name = host[3];
- this.messageClass = ESPMessageClass.val(host[4]);
- this.consumerId = addrUrl.getUsername();
- }
-
- public String getDomain() {
- return domain;
- }
-
- public String getService() {
- return service;
- }
-
- public String getName() {
- return name;
- }
-
- public ESPAddressClass getAddressClass() {
- return addressClass;
- }
-
- public ESPMessageClass getMessageClass() {
- return messageClass;
- }
-
- public String getOrg() {
- return org;
- }
-
- @Override
- public String toString() {
- return "ESPAddress{" +
- "org='" + org + '\'' +
- ", domain='" + domain + '\'' +
- ", service='" + service + '\'' +
- ", name='" + name + '\'' +
- ", serviceClass=" + addressClass +
- ", destClass=" + messageClass +
- '}';
- }
-
- public String toOpenWireBase() {
- StringAccumulatorV2 sa = new StringAccumulatorV2(".");
- sa.push(org.toUpperCase(Locale.ROOT));
- sa.push(domain.toUpperCase(Locale.ROOT));
- sa.push(service.toUpperCase(Locale.ROOT));
- sa.push(name.toUpperCase(Locale.ROOT));
- return sa.asString();
- }
-
- /**
- * Returns this destination in OpenWire format. Note that the OpenWire address does not include the service
- * class, so the caller will need to deal with that appropriately.
- */
- public String toOpenWireAddress() {
- StringAccumulatorV2 sa = new StringAccumulatorV2(".");
- sa.push(org.toUpperCase(Locale.ROOT));
- sa.push(domain.toUpperCase(Locale.ROOT));
- sa.push(service.toUpperCase(Locale.ROOT));
- sa.push(name.toUpperCase(Locale.ROOT));
- sa.push(messageClass.toString().toUpperCase(Locale.ROOT));
- return sa.asString();
- }
-
- public String toMQTTAddress() {
- StringAccumulatorV2 sa = new StringAccumulatorV2("/");
- sa.push(org.toUpperCase(Locale.ROOT));
- sa.push(domain.toUpperCase(Locale.ROOT));
- sa.push(service.toUpperCase(Locale.ROOT));
- sa.push(name.toUpperCase(Locale.ROOT));
- sa.push(messageClass.toString().toUpperCase(Locale.ROOT));
- return sa.asString();
- }
-
- /**
- * Return this destination in STOMP format.
- */
- public String toStompAddress() {
- StringAccumulatorV2 sa = new StringAccumulatorV2("/");
- sa.push(addressClass.name().toLowerCase(Locale.ROOT));
- sa.push(org.toLowerCase(Locale.ROOT));
- sa.push(domain.toLowerCase(Locale.ROOT));
- sa.push(service.toLowerCase(Locale.ROOT));
- sa.push(name.toLowerCase(Locale.ROOT));
- sa.push(messageClass.toString().toUpperCase(Locale.ROOT));
- return sa.asString();
- }
-
- public String getConsumerId() {
- return consumerId;
- }
-
- /**
- * Convert this address to a JMS destination. If the address is for a topic, it will be prefixed with the
- * provided consumer ID. Note that the consumer ID is NOT case sensitive. For the purpose of building the
- * destination it will be converted to upper-case.
- */
- public Destination toConsumerDestination(Session session) throws JMSException {
- if (addressClass == ESPAddressClass.QUEUE) {
- return session.createQueue(toOpenWireAddress());
- } else if (addressClass == ESPAddressClass.TOPIC) {
- return session.createQueue(VIRTUAL_TOPIC_PREFIX +
- "." + consumerId.toUpperCase(Locale.ROOT) + "." + toOpenWireAddress());
- } else {
- throw new IllegalStateException("Unknown service class.");
- }
- }
-
- /**
- * Convert the address to a JMS destination without any support for virtual topics.
- */
- public Destination toDestination(Session session) throws JMSException {
- if (addressClass == ESPAddressClass.QUEUE) {
- return session.createQueue(toOpenWireAddress());
- } else if (addressClass == ESPAddressClass.TOPIC) {
- return session.createTopic(toOpenWireAddress());
- } else {
- throw new IllegalStateException("Unknown service class.");
- }
- }
-}
-
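
For reference, the ESPAddress class deleted above renders the same org/domain/service/name/message-class tuple in several wire formats. A minimal sketch against the SDK classes shown in this diff, using hypothetical org/domain/service/name values:

```java
import lc.esp.sdk.ESPAddress;
import lc.esp.sdk.ESPAddressClass;
import lc.esp.sdk.ESPMessageClass;

public class AddressFormats {
    public static void main(String[] args) {
        // "lc", "demo", "clock", "ticks" are illustrative values only.
        ESPAddress addr = new ESPAddress("lc", "demo", "clock", "ticks",
                ESPAddressClass.TOPIC, ESPMessageClass.TELEMETRY);

        // Dot-joined, upper-cased segments plus the message-class abbreviation, e.g. LC.DEMO.CLOCK.TICKS.TLM
        System.out.println(addr.toOpenWireAddress());
        // Slash-joined variant of the same segments, e.g. LC/DEMO/CLOCK/TICKS/TLM
        System.out.println(addr.toMQTTAddress());
        // STOMP form: address class first, lower-cased segments, upper-cased message class
        System.out.println(addr.toStompAddress());
        // Each instance gets a random consumer ID unless one is parsed from a destination
        System.out.println(addr.getConsumerId());
    }
}
```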
diff --git a/lc-esp-sdk/src/main/java/lc/esp/sdk/ESPAddressClass.java b/lc-esp-sdk/src/main/java/lc/esp/sdk/ESPAddressClass.java
deleted file mode 100644
index 9edc63a7f2d1b8a975e8a56acb34e25054750154..0000000000000000000000000000000000000000
--- a/lc-esp-sdk/src/main/java/lc/esp/sdk/ESPAddressClass.java
+++ /dev/null
@@ -1,24 +0,0 @@
-package lc.esp.sdk;
-
-import java.util.Locale;
-
-/**
- * This enum defines the semantics for an {@link ESPAddress}. The two options available are topic and queue, whose
- * nomenclature lines up with JMS and other popular message-queue implementations.
- *
- * @author Alex Leigh
- * @since mk18 (GIPSY DANGER)
- */
-public enum ESPAddressClass {
- TOPIC, QUEUE;
-
- public static ESPAddressClass val(String str) {
- switch (str.toLowerCase(Locale.ROOT)) {
- case "topic":
- return TOPIC;
- case "queue":
- return QUEUE;
- }
- throw new IllegalStateException("Unknown class: " + str);
- }
-}
diff --git a/lc-esp-sdk/src/main/java/lc/esp/sdk/ESPClient.java b/lc-esp-sdk/src/main/java/lc/esp/sdk/ESPClient.java
deleted file mode 100644
index 15857593cddbd4003da90634d74dc97592eaf4c4..0000000000000000000000000000000000000000
--- a/lc-esp-sdk/src/main/java/lc/esp/sdk/ESPClient.java
+++ /dev/null
@@ -1,95 +0,0 @@
-package lc.esp.sdk;
-
-import lc.mecha.json.JSONObject;
-import lc.mecha.log.MechaLogger;
-import lc.mecha.log.MechaLoggerFactory;
-import lc.mecha.util.UniversalJob;
-import lc.zero.sdk.ZeroActivationIndex;
-import lc.zero.sdk.ZeroClient;
-import lc.zero.sdk.ZeroServiceConfig;
-import org.apache.activemq.ActiveMQConnectionFactory;
-
-import javax.jms.Connection;
-import javax.jms.IllegalStateException;
-import javax.jms.JMSException;
-import java.security.KeyManagementException;
-import java.security.KeyStoreException;
-import java.security.NoSuchAlgorithmException;
-
-/**
- * Enhanced Services Platform mk17 client.
- *
- * This implementation is essentially a wrapper around the ActiveMQ JMS client. We are not adding much functionality
- * here other than some protocol semantics which are peculiar to ESP and ensuring that the right dependencies
- * are brought into the end-user's project.
- *
- * In the future the client will also handle discovery and authentication. Note that authentication with ESP is
- * peculiar, so the regular username/password available via normal JMS will not work.
- *
- * @author Alex Leigh
- * @since mk17 (GIPSY DANGER)
- */
-public class ESPClient implements AutoCloseable {
- private static final MechaLogger logger = MechaLoggerFactory.getLogger(ESPClient.class);
- private ActiveMQConnectionFactory connectionFactory;
- private Connection connection;
- private final ZeroClient zero = new ZeroClient();
- public static final String SVC_ESP = "lc.esp";
-
- public ESPClient() throws KeyStoreException, NoSuchAlgorithmException, KeyManagementException {
- UniversalJob.banner(logger, "Enhanced Services Platform/SDK mk18 (GIPSY DANGER)");
- }
-
- /**
- * Returns the ESPClient's {@link ZeroClient} to the caller. The caller can use this to determine other
- * application-specific service configuration. If start() has not been called, this method
- * will return null.
- */
- public ZeroClient getZero() {
- return zero;
- }
-
- /**
- * Start the ESP client. This will perform Zero activation. The method blocks until the zero activation
- * is successful.
- */
- public ESPClient start() throws Exception {
- new Thread(zero).start();
- ZeroActivationIndex zai;
-
- zai = zero.getZai();
-
- ZeroServiceConfig espCfg = zai.readConfig(SVC_ESP);
-
- if (espCfg == null) {
- throw new IllegalStateException("No service configuration found in ZERO.");
- }
-
- JSONObject connections = espCfg.getCfg().getJSONObject("connections");
- String openWireUrl = connections.getString("openwire");
-
- logger.info("Learned configuration from ZERO. [openwire: {}]", openWireUrl);
-
- connectionFactory = new ActiveMQConnectionFactory(openWireUrl);
- connectionFactory.setUseAsyncSend(true);
- connection = getConnectionFactory().createConnection();
- connection.start();
- return this;
- }
-
- /**
- * Return the connection factory associated with this client.
- */
- public ActiveMQConnectionFactory getConnectionFactory() {
- return connectionFactory;
- }
-
- @Override
- public void close() throws Exception {
- if (connection != null) connection.close();
- }
-
- public ESPSession createSession() throws JMSException {
- return new ESPSession(connection);
- }
-}
\ No newline at end of file
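
For context, a minimal bootstrap sketch against the ESPClient removed above. It assumes a reachable ZERO deployment that publishes an "openwire" connection URL, as start() expects:

```java
import lc.esp.sdk.ESPClient;
import lc.esp.sdk.ESPSession;

public class ClientBootstrap {
    public static void main(String[] args) throws Exception {
        // start() blocks until Zero activation yields the broker configuration.
        try (ESPClient client = new ESPClient().start();
             ESPSession session = client.createSession()) {
            System.out.println("Connected to " + client.getConnectionFactory().getBrokerURL());
            // Producers, consumers, and requestors are created from the session.
        }
    }
}
```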
diff --git a/lc-esp-sdk/src/main/java/lc/esp/sdk/ESPConsumer.java b/lc-esp-sdk/src/main/java/lc/esp/sdk/ESPConsumer.java
deleted file mode 100644
index 95329908b217b30e19fbc4da4cf5d15d3af835c6..0000000000000000000000000000000000000000
--- a/lc-esp-sdk/src/main/java/lc/esp/sdk/ESPConsumer.java
+++ /dev/null
@@ -1,56 +0,0 @@
-package lc.esp.sdk;
-
-import lc.mecha.log.MechaLogger;
-import lc.mecha.log.MechaLoggerFactory;
-
-import javax.jms.*;
-
-import static java.lang.Thread.sleep;
-
-/**
- * This class implements a message consumer which synchronously receives messages from an ESP/ENGINE over the
- * OpenWire protocol.
- *
- * The consumer supports a consumerId. This uniquely identifies the consumer in such a manner that all consumers
- * with the same consumer ID will compete for messages sent to a topic such that only one of the consumers
- * will receive any given message. See ATG-51 for more information.
- *
- * @author Alex Leigh
- * @since 18.0 (GIPSY DANGER)
- */
-public class ESPConsumer implements AutoCloseable {
- private static final MechaLogger logger = MechaLoggerFactory.getLogger(ESPConsumer.class);
- private final MessageConsumer consumer;
-
- public ESPConsumer(ESPAddress destination, Session session) throws JMSException {
- Destination d = destination.toConsumerDestination(session);
- logger.debug("Created consumer. [destination: {}]", d);
- consumer = session.createConsumer(d);
- }
-
- public ESPMessage receive() throws InterruptedException {
- while (true) {
- try {
- Message msg = consumer.receive();
- logger.debug("Received message: {}", msg);
- return new ESPMessage(msg);
- } catch (Exception e) {
- // TODO: Demote to debug when we are ready
- logger.info("Received invalid message: " + e);
- sleep(100);
- }
- }
- }
-
- @Override
- public void close() throws Exception {
- consumer.close();
- }
-
- @Override
- public String toString() {
- return "ESPConsumer{" +
- "consumer=" + consumer +
- '}';
- }
-}
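
A sketch of the synchronous receive loop the deleted ESPConsumer supports, using a session from the client shown above. The address values are hypothetical; the consumer ID carried by the ESPAddress determines which consumers compete for topic messages:

```java
import lc.esp.sdk.*;

public class ConsumeLoop {
    public static void main(String[] args) throws Exception {
        try (ESPClient client = new ESPClient().start();
             ESPSession session = client.createSession()) {
            ESPAddress address = new ESPAddress("lc", "demo", "clock", "ticks",
                    ESPAddressClass.TOPIC, ESPMessageClass.TELEMETRY);
            try (ESPConsumer consumer = session.createConsumer(address)) {
                //noinspection InfiniteLoopStatement
                while (true) {
                    // receive() blocks and silently retries past malformed messages.
                    ESPMessage msg = consumer.receive();
                    System.out.println(msg.getPayload());
                }
            }
        }
    }
}
```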
diff --git a/lc-esp-sdk/src/main/java/lc/esp/sdk/ESPMessage.java b/lc-esp-sdk/src/main/java/lc/esp/sdk/ESPMessage.java
deleted file mode 100644
index ec7ac237acc65fe73018a354176ae46ca9e6e3d9..0000000000000000000000000000000000000000
--- a/lc-esp-sdk/src/main/java/lc/esp/sdk/ESPMessage.java
+++ /dev/null
@@ -1,206 +0,0 @@
-package lc.esp.sdk;
-
-import lc.eo.EO;
-import lc.eo.EOJsonSerializer;
-import lc.mecha.json.JSONArray;
-import lc.mecha.json.JSONObject;
-import lc.mecha.log.MechaLogger;
-import lc.mecha.log.MechaLoggerFactory;
-import org.apache.activemq.command.ActiveMQBytesMessage;
-
-import javax.jms.IllegalStateException;
-import javax.jms.*;
-import java.io.Serializable;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.Map;
-import java.util.Objects;
-
-public class ESPMessage {
- private static final MechaLogger logger = MechaLoggerFactory.getLogger(ESPMessage.class);
- public static final String KEY_PAYLOAD = "payload";
- public static final String KEY_PARAMETERS = "parameters";
- public static final String KEY_NAME = "name";
- public static final String KEY_VALUE = "value";
-
- private JSONObject payload;
- private ESPAddress replyTo;
- private ESPAddress destination;
- private String correlationId;
- private final Map<String, Serializable> parameters = new HashMap<>();
-
- public ESPMessage(Message msg) throws JMSException {
- logger.debug("Building from message: {}", msg);
-
- Destination dest = msg.getJMSDestination();
- if (dest != null) {
- destination = new ESPAddress(dest);
- }
-
- correlationId = msg.getJMSCorrelationID();
-
- Destination dst = msg.getJMSReplyTo();
- if (dst != null) {
- replyTo = new ESPAddress(dst);
- }
-
- for (Iterator<String> it = msg.getPropertyNames().asIterator(); it.hasNext(); ) {
- String param = it.next();
- parameters.put(param, (Serializable) msg.getObjectProperty(param));
- }
- if (msg instanceof TextMessage) {
- payload = new JSONObject(((TextMessage) msg).getText());
- } else if (msg instanceof ActiveMQBytesMessage) {
- String s = new String(((ActiveMQBytesMessage) msg).getContent().getData());
- payload = new JSONObject(s);
- } else {
- throw new IllegalStateException("Unknown message type: " + msg.getClass());
- }
- }
-
- /**
- * Serialize the entire message into JSON.
- */
- public JSONObject toJson() {
- JSONObject o = new JSONObject();
- o.put(KEY_PAYLOAD, payload);
- JSONArray arr = new JSONArray();
- for (Map.Entry<String, Serializable> param : parameters.entrySet()) {
- arr.put(new JSONObject().put(KEY_NAME, param.getKey()).put(KEY_VALUE, param.getValue()));
- }
- if (arr.length() > 0) o.put(KEY_PARAMETERS, arr);
- return o;
- }
-
- /**
- * Return the payload of this message in JSON format.
- */
- public JSONObject getPayload() {
- return payload;
- }
-
- public EO getPayloadEO() {
- logger.debug("Decoding message payload: {}", payload);
- return EOJsonSerializer.toEo(payload);
- }
-
- /**
- * Set a parameter in the message. This parameter may later be used to filter
- * messages during the subscription process (by creating a selector).
- *
- * All parameters which begin with underscore are reserved by ESP.
- */
- public void setParameter(String name, Serializable value) {
- parameters.put(name, value);
- }
-
- public ESPMessage() {
-
- }
-
- public Message toMessage(Session session) throws JMSException {
- TextMessage jms = session.createTextMessage(getPayload().toString());
-
- for (Map.Entry<String, Serializable> param : getParameters().entrySet()) {
- jms.setObjectProperty(param.getKey(), param.getValue());
- }
-
- String correlationId = getCorrelationId();
- if (correlationId != null) jms.setJMSCorrelationID(correlationId);
-
- ESPAddress replyTo = getReplyTo();
- if (replyTo != null) {
- jms.setJMSReplyTo(replyTo.toDestination(session));
- }
-
- return jms;
- }
-
- public Serializable getParameter(String name) {
- return parameters.get(name);
- }
-
- public ESPMessage(JSONObject payload, ESPAddress replyTo, String correlationId) {
- this.payload = payload;
- this.replyTo = replyTo;
- this.correlationId = correlationId;
- }
-
- public void setPayload(JSONObject payload) {
- this.payload = payload;
- }
-
- /**
- * Set the payload of this ESPMessage to the given {@link EO}.
- *
- * @param payload The message.
- */
- public void setPayload(EO payload) {
- this.payload = EOJsonSerializer.toJson(payload);
- for (String key : payload.getKeys()) {
- if (!payload.isValueLoop(key)) {
- // TODO: Natural numbers would have been fine too, no need to convert to string
- String s = payload.getValueString(key);
- setParameter("lc.eo." + key, s);
- }
- }
- String type = payload.getType();
- setParameter("lc.type", type);
- }
-
- public ESPMessage(JSONObject payload) {
- this(payload, null, null);
- }
-
- public ESPMessage(EO payload) {
- this(EOJsonSerializer.toJson(payload));
- }
-
- public String getCorrelationId() {
- return correlationId;
- }
-
- public Map<String, Serializable> getParameters() {
- return parameters;
- }
-
- @Override
- public String toString() {
- return "ESPMessage{" +
- "payload=" + payload +
- ", replyTo=" + replyTo +
- ", destination=" + destination +
- ", correlationId='" + correlationId + '\'' +
- ", parameters=" + parameters +
- '}';
- }
-
- public ESPAddress getReplyTo() {
- return replyTo;
- }
-
- public ESPAddress getDestination() {
- return destination;
- }
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- ESPMessage that = (ESPMessage) o;
- return Objects.equals(payload, that.payload);
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(payload);
- }
-
- public void setReplyTo(ESPAddress replyTo) {
- this.replyTo = replyTo;
- }
-
- public void setCorrelationId(String correlationId) {
- this.correlationId = correlationId;
- }
-}
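
The deleted ESPMessage carries a JSON payload plus a parameter map that becomes JMS properties (and can back selectors). A small construction sketch, assuming lc.mecha.json.JSONObject follows the chaining put() style seen elsewhere in this diff:

```java
import lc.esp.sdk.ESPMessage;
import lc.mecha.json.JSONObject;

public class BuildMessage {
    public static void main(String[] args) {
        ESPMessage msg = new ESPMessage(new JSONObject().put("status", "ok"));
        // Parameters travel as JMS properties; names starting with an underscore are reserved by ESP.
        msg.setParameter("region", "us-east");
        System.out.println(msg.toJson());
    }
}
```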
diff --git a/lc-esp-sdk/src/main/java/lc/esp/sdk/ESPMessageClass.java b/lc-esp-sdk/src/main/java/lc/esp/sdk/ESPMessageClass.java
deleted file mode 100644
index cfde44efc150f6f9708df0c6a205fbd7263d8700..0000000000000000000000000000000000000000
--- a/lc-esp-sdk/src/main/java/lc/esp/sdk/ESPMessageClass.java
+++ /dev/null
@@ -1,44 +0,0 @@
-package lc.esp.sdk;
-
-import java.util.Locale;
-
-/**
- * This enum contains valid destination classes. It is capable of converting the semantic meaning to the correct
- * abbreviation for ESP18 addresses.
- *
- * @author Alex Leigh
- * @since mk18 (GIPSY DANGER)
- */
-public enum ESPMessageClass {
- TELEMETRY {
- public String toString() {
- return KEY_TLM;
- }
- },
- STREAM {
- public String toString() {
- return KEY_STREAM;
- }
- },
- COMMAND {
- public String toString() {
- return KEY_COMMAND;
- }
- };
-
- public static final String KEY_TLM = "tlm";
- public static final String KEY_STREAM = "stm";
- public static final String KEY_COMMAND = "cmd";
-
- public static ESPMessageClass val(String str) {
- switch (str.toLowerCase(Locale.ROOT)) {
- case KEY_TLM:
- return TELEMETRY;
- case KEY_COMMAND:
- return COMMAND;
- case KEY_STREAM:
- return STREAM;
- }
- throw new IllegalStateException("Unknown symbol: " + str);
- }
-}
\ No newline at end of file
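
A quick round trip through the deleted ESPMessageClass enum, which maps between the semantic name and the three-letter address abbreviation:

```java
import lc.esp.sdk.ESPMessageClass;

public class MessageClassDemo {
    public static void main(String[] args) {
        // toString() yields the abbreviation used in addresses.
        System.out.println(ESPMessageClass.TELEMETRY);                              // tlm
        // val() accepts either case and returns the enum constant.
        System.out.println(ESPMessageClass.val("CMD") == ESPMessageClass.COMMAND);  // true
    }
}
```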
diff --git a/lc-esp-sdk/src/main/java/lc/esp/sdk/ESPProducer.java b/lc-esp-sdk/src/main/java/lc/esp/sdk/ESPProducer.java
deleted file mode 100644
index 4f25efffa9bee3a88d135fc2189dc9fc3e0c249e..0000000000000000000000000000000000000000
--- a/lc-esp-sdk/src/main/java/lc/esp/sdk/ESPProducer.java
+++ /dev/null
@@ -1,44 +0,0 @@
-package lc.esp.sdk;
-
-import lc.mecha.log.MechaLogger;
-import lc.mecha.log.MechaLoggerFactory;
-
-import javax.jms.*;
-
-public class ESPProducer implements AutoCloseable {
- private static final MechaLogger logger = MechaLoggerFactory.getLogger(ESPProducer.class);
- private final MessageProducer producer;
- private final Session session;
-
- @Deprecated
- public ESPProducer(Session session, ESPAddress address) throws JMSException {
- this.session = session;
- producer = session.createProducer(address.toDestination(session));
- producer.setDeliveryMode(DeliveryMode.PERSISTENT);
- }
-
- public ESPProducer(Session session) throws JMSException {
- this.session = session;
- producer = session.createProducer(null);
- producer.setDeliveryMode(DeliveryMode.PERSISTENT);
- }
-
- @Deprecated
- public void send(ESPMessage msg) throws JMSException {
- logger.debug("Sending message... {}", msg);
- Message jms = msg.toMessage(session);
- producer.send(jms);
- }
-
- public void send(ESPAddress destination, ESPMessage msg) throws JMSException {
- logger.debug("Sending message... {}", msg);
- Message jms = msg.toMessage(session);
- producer.send(destination.toDestination(session), jms);
- }
-
- @Override
- public void close() throws Exception {
- // We leave it to ESPSession to close the session
- producer.close();
- }
-}
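
Sending with the non-deprecated per-send destination form of the deleted ESPProducer; the queue name and payload below are hypothetical:

```java
import lc.esp.sdk.*;
import lc.mecha.json.JSONObject;

public class SendExample {
    public static void main(String[] args) throws Exception {
        try (ESPClient client = new ESPClient().start();
             ESPSession session = client.createSession();
             ESPProducer producer = session.createProducer()) {
            ESPAddress dest = new ESPAddress("lc", "demo", "clock", "set",
                    ESPAddressClass.QUEUE, ESPMessageClass.COMMAND);
            // Messages are sent PERSISTENT by default, per the producer constructor.
            producer.send(dest, new ESPMessage(new JSONObject().put("offsetMs", 250)));
        }
    }
}
```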
diff --git a/lc-esp-sdk/src/main/java/lc/esp/sdk/ESPRequestor.java b/lc-esp-sdk/src/main/java/lc/esp/sdk/ESPRequestor.java
deleted file mode 100644
index 2d33eff286036597a39779d55805a04cd558228e..0000000000000000000000000000000000000000
--- a/lc-esp-sdk/src/main/java/lc/esp/sdk/ESPRequestor.java
+++ /dev/null
@@ -1,91 +0,0 @@
-package lc.esp.sdk;
-
-import lc.mecha.lang.FutureResult;
-import lc.mecha.log.MechaLogger;
-import lc.mecha.log.MechaLoggerFactory;
-import lc.mecha.util.BasicallyDangerous;
-
-import javax.jms.*;
-import java.util.HashMap;
-import java.util.UUID;
-
-/**
- * This class implements a requestor which is capable of sending messages to JMS and waiting for a response.
- *
- * @author Alex Leigh
- * @since 18.1
- */
-public class ESPRequestor extends BasicallyDangerous implements AutoCloseable {
- private static final MechaLogger logger = MechaLoggerFactory.getLogger(ESPRequestor.class);
- private final MessageProducer producer;
- private final MessageConsumer consumer;
- private final Session session;
- private final ESPAddress srcAddress;
- // FIXME: This will leak futures. We need a timeout map.
- private final HashMap<String, FutureResult<ESPMessage>> futures = new HashMap<>();
-
- protected ESPRequestor(Session session) throws JMSException {
- this.session = session;
- srcAddress = new ESPAddress("lc", "client", "session",
- UUID.randomUUID().toString(), ESPAddressClass.QUEUE, ESPMessageClass.COMMAND);
- producer = session.createProducer(null);
- consumer = session.createConsumer(srcAddress.toDestination(session));
- }
-
- /**
- * Asynchronously send a message to ESP. A {@link FutureResult} will be returned. If a reply to this message
- * is received by the client the FutureResult will be fulfilled.
- */
- public FutureResult<ESPMessage> request(ESPAddress destination, ESPMessage espMessage)
- throws JMSException {
-
- FutureResult<ESPMessage> future = new FutureResult<>();
- String correlationId = UUID.randomUUID().toString();
- espMessage.setCorrelationId(correlationId);
- espMessage.setReplyTo(srcAddress);
- Message jms = espMessage.toMessage(session);
- futures.put(correlationId, future);
-
- synchronized (this) {
- producer.send(destination.toDestination(session), jms);
- }
-
- return future;
- }
-
- @Override
- public void close() {
- try {
- producer.close();
- } catch (Exception ignored) {
- // NOP
- }
-
- try {
- consumer.close();
- } catch (Exception ignored) {
- // NOP
- }
- }
-
- @Override
- public void runDangerously() throws Exception {
- //noinspection InfiniteLoopStatement
- while (true) {
- Message msg = consumer.receive();
- String correlationId = msg.getJMSCorrelationID();
- if (correlationId == null) {
- logger.warn("Received message with no correlation ID. {}", msg);
- break;
- }
-
- FutureResult<ESPMessage> future = futures.get(correlationId);
- if (future == null) {
- logger.warn("Received message, but no future. [correlationId: {}]", correlationId);
- break;
- }
-
- future.set(new ESPMessage(msg));
- }
- }
-}
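
A request/response sketch with the deleted ESPRequestor. The requestor is obtained from ESPSession.createRequestor(); its reply-dispatch loop lives in runDangerously(), which is assumed here to be driven on a background thread (how BasicallyDangerous instances are normally scheduled is not shown in this diff):

```java
import lc.esp.sdk.*;
import lc.mecha.json.JSONObject;
import lc.mecha.lang.FutureResult;

public class RequestExample {
    public static void main(String[] args) throws Exception {
        try (ESPClient client = new ESPClient().start();
             ESPSession session = client.createSession();
             ESPRequestor requestor = session.createRequestor()) {
            // Drive the correlation-ID dispatch loop.
            new Thread(() -> {
                try {
                    requestor.runDangerously();
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }).start();

            ESPAddress service = new ESPAddress("lc", "demo", "clock", "state",
                    ESPAddressClass.QUEUE, ESPMessageClass.COMMAND);
            FutureResult<ESPMessage> reply =
                    requestor.request(service, new ESPMessage(new JSONObject().put("query", "now")));
            // The future is fulfilled when a reply with the matching correlation ID arrives;
            // how the result is read back depends on FutureResult's API, which this diff does not include.
        }
    }
}
```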
diff --git a/lc-esp-sdk/src/main/java/lc/esp/sdk/ESPSession.java b/lc-esp-sdk/src/main/java/lc/esp/sdk/ESPSession.java
deleted file mode 100644
index 922a8d4bce518db6d1454b4e36112f762faa0254..0000000000000000000000000000000000000000
--- a/lc-esp-sdk/src/main/java/lc/esp/sdk/ESPSession.java
+++ /dev/null
@@ -1,35 +0,0 @@
-package lc.esp.sdk;
-
-import javax.jms.Connection;
-import javax.jms.JMSException;
-import javax.jms.Session;
-
-public class ESPSession implements AutoCloseable {
- private final Session session;
-
- public ESPSession(Connection connection) throws JMSException {
- this.session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
- }
-
- public ESPConsumer createConsumer(ESPAddress destination) throws JMSException {
- return new ESPConsumer(destination, session);
- }
-
- @Deprecated
- public ESPProducer createProducer(ESPAddress destination) throws JMSException {
- return new ESPProducer(session, destination);
- }
-
- public ESPProducer createProducer() throws JMSException {
- return new ESPProducer(session);
- }
-
- public ESPRequestor createRequestor() throws JMSException {
- return new ESPRequestor(session);
- }
-
- @Override
- public void close() throws Exception {
- session.close();
- }
-}
diff --git a/lc-esp-sdk/src/main/java/lc/esp/sdk/SchemaGenerator.java b/lc-esp-sdk/src/main/java/lc/esp/sdk/SchemaGenerator.java
deleted file mode 100644
index e4dd0ec10d0ea4501a2f56192c8d192b73427d75..0000000000000000000000000000000000000000
--- a/lc-esp-sdk/src/main/java/lc/esp/sdk/SchemaGenerator.java
+++ /dev/null
@@ -1,60 +0,0 @@
-package lc.esp.sdk;
-
-import lc.eo.EO;
-import lc.eo.EODataType;
-import lc.eo.schema.DAOGenerator;
-import lc.eo.schema.ElementElementDAO;
-import lc.eo.schema.SchemaElementDAO;
-import lc.eo.schema.util.AttributeUtil;
-import lc.mecha.log.MechaLogger;
-import lc.mecha.log.MechaLoggerFactory;
-import lc.mecha.util.UniversalJob;
-
-import java.io.File;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.io.PrintWriter;
-
-public class SchemaGenerator {
- private static final MechaLogger logger = MechaLoggerFactory.getLogger(SchemaGenerator.class);
- private static final String BASE_PKG = "lc.esp.v18";
-
- // Observe the procedures of a general alert
-
- public static EO generate() {
- final EO schema = lc.eo.schema.util.SchemaUtil.create(BASE_PKG + ".schema");
-
- final EO graphStateRequest = ElementElementDAO.create();
- SchemaElementDAO.getElements(schema).add(graphStateRequest);
- ElementElementDAO.setEoType(graphStateRequest, BASE_PKG + ".graph_state_request");
-
- final EO graphStateResponse = ElementElementDAO.create();
- SchemaElementDAO.getElements(schema).add(graphStateResponse);
- ElementElementDAO.setEoType(graphStateResponse, BASE_PKG + ".graph_state_response");
- ElementElementDAO.getAttributes(graphStateResponse).add(AttributeUtil.create("txid",
- "Transaction ID", EODataType.uuid));
- ElementElementDAO.getAttributes(graphStateResponse).add(AttributeUtil.create("graph",
- "Graph", EODataType.loop));
-
- final EO graphTxn = ElementElementDAO.create();
- SchemaElementDAO.getElements(schema).add(graphTxn);
- ElementElementDAO.setEoType(graphTxn, BASE_PKG + ".graph_txn");
- ElementElementDAO.getAttributes(graphTxn).add(AttributeUtil.create("txid",
- "Transaction ID", EODataType.uuid));
- ElementElementDAO.getAttributes(graphTxn).add(AttributeUtil.create("changeset",
- "Graph Deltas", EODataType.loop));
-
- return schema;
- }
-
- public static void main(final String... args) throws IOException {
- UniversalJob.banner(logger, "Apotheosis mk3", "2014-2022 Alex Leigh");
- final EO schema = generate();
- final DAOGenerator gen = new DAOGenerator(new File("lc-esp-sdk/src/main/java"));
- gen.generateJava("lc.esp.sdk.schema.v18", schema);
- try (PrintWriter pw = new PrintWriter(new FileWriter(new File("out.js")))) {
- gen.generateEcma5(pw, "lc.esp.v18", schema);
- }
- logger.info("Generated schema: {}", schema);
- }
-}
diff --git a/lc-esp-sdk/src/main/java/lc/esp/sdk/schema/v18/Graph_state_requestElementDAO.java b/lc-esp-sdk/src/main/java/lc/esp/sdk/schema/v18/Graph_state_requestElementDAO.java
deleted file mode 100644
index 0430ab02758c7a46d2adb89e794052633fca653e..0000000000000000000000000000000000000000
--- a/lc-esp-sdk/src/main/java/lc/esp/sdk/schema/v18/Graph_state_requestElementDAO.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Copyright (c) 2014-2022, by Alex Leigh.
- * All rights reserved.
- *
- * THIS IS UNPUBLISHED PROPRIETARY SOURCE CODE
- * The copyright notice above does not evidence any
- * actual or intended publication of such source code.
- */
-
-package lc.esp.sdk.schema.v18;
-
-import lc.eo.EO;
-
-/**
- * This is a dynamically generated DAO class for accessing objects within an ESP graph. This class
- * has been generated by DAOGenerator and should not be modified.
- *
- * @author DAOGenerator (synthetically generated class)
- */
-public final class Graph_state_requestElementDAO {
- public static final String API_TYPE = "lc.esp.v18.graph_state_request";
-
- public static EO create() {
- EO eo = new EO(API_TYPE);
- return eo;
- }
-
- public static boolean assertType(final EO eo) {
- return eo.getType().equals(API_TYPE);
- }
-
-
- public static String apiType(final EO eo) {
- return eo.getType();
-}
-
-
-}
\ No newline at end of file
diff --git a/lc-esp-sdk/src/main/java/lc/esp/sdk/schema/v18/Graph_state_responseElementDAO.java b/lc-esp-sdk/src/main/java/lc/esp/sdk/schema/v18/Graph_state_responseElementDAO.java
deleted file mode 100644
index 01e5442324c5fcb455a59a5e420547a5f30a13e0..0000000000000000000000000000000000000000
--- a/lc-esp-sdk/src/main/java/lc/esp/sdk/schema/v18/Graph_state_responseElementDAO.java
+++ /dev/null
@@ -1,108 +0,0 @@
-/*
- * Copyright (c) 2014-2022, by Alex Leigh.
- * All rights reserved.
- *
- * THIS IS UNPUBLISHED PROPRIETARY SOURCE CODE
- * The copyright notice above does not evidence any
- * actual or intended publication of such source code.
- */
-
-package lc.esp.sdk.schema.v18;
-
-import lc.eo.EO;
-
-/**
- * This is a dynamically generated DAO class for accessing objects within an ESP graph. This class
- * has been generated by DAOGenerator and should not be modified.
- *
- * @author DAOGenerator (synthetically generated class)
- */
-public final class Graph_state_responseElementDAO {
- public static final String API_TYPE = "lc.esp.v18.graph_state_response";
-
- public static EO create() {
- EO eo = new EO(API_TYPE);
- return eo;
- }
-
- public static boolean assertType(final EO eo) {
- return eo.getType().equals(API_TYPE);
- }
-
- public static final String KEY_TXID = "txid";
- public static final String KEY_GRAPH = "graph";
-
- public static String apiType(final EO eo) {
- return eo.getType();
- }
-
-
- /* EO{typeName='lc.eo.schema.Attribute', id=c80e1270-6bf7-4975-8aa2-9af1e12b95c9, data={eoType=txid, label=EOLoop{array=[EO{typeName='lc.eo.schema.Text', id=e543ce40-b6ec-40fd-9a37-69a86d59a601, data={text=Transaction ID, locale=en}, meta=null}]} , type=uuid}, meta=null} */
-
-
- public static java.util.UUID getTxid(EO eo) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- return eo.getValueUUID(KEY_TXID);
- }
-
- public static void setTxid(final EO eo, java.util.UUID value) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.setValue(KEY_TXID, value);
- }
-
- public static void setTxidLabel(final EO eo, final String label) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.setLabel(KEY_TXID, label);
- }
-
- public static void getLabelOrValueTxid(final EO eo, java.util.UUID value) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.getLabelOrValue(KEY_TXID);
- }
-
- public static void setIfUnsetTxid(final EO eo, java.util.UUID value) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.setValueIfUnset(KEY_TXID, value);
- }
-
-
- /* EO{typeName='lc.eo.schema.Attribute', id=eed7627e-73c0-4b70-ae43-1a875d3a1fee, data={eoType=graph, label=EOLoop{array=[EO{typeName='lc.eo.schema.Text', id=8e425555-6d0d-48ca-a469-957fe1cf4b68, data={text=Graph, locale=en}, meta=null}]} , type=loop}, meta=null} */
-
-
- public static lc.eo.EOLoop getGraph(EO eo) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- return eo.getValueLoop(KEY_GRAPH);
- }
-
- public static void setGraph(final EO eo, lc.eo.EOLoop value) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.setValue(KEY_GRAPH, value);
- }
-
- public static void setGraphLabel(final EO eo, final String label) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.setLabel(KEY_GRAPH, label);
- }
-
- public static void getLabelOrValueGraph(final EO eo, lc.eo.EOLoop value) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.getLabelOrValue(KEY_GRAPH);
- }
-
- public static void setIfUnsetGraph(final EO eo, lc.eo.EOLoop value) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.setValueIfUnset(KEY_GRAPH, value);
-}
-
-
-}
\ No newline at end of file
diff --git a/lc-esp-sdk/src/main/java/lc/esp/sdk/schema/v18/Graph_txnElementDAO.java b/lc-esp-sdk/src/main/java/lc/esp/sdk/schema/v18/Graph_txnElementDAO.java
deleted file mode 100644
index fa3785557e19dd5e6453164963a8de128016f10a..0000000000000000000000000000000000000000
--- a/lc-esp-sdk/src/main/java/lc/esp/sdk/schema/v18/Graph_txnElementDAO.java
+++ /dev/null
@@ -1,108 +0,0 @@
-/*
- * Copyright (c) 2014-2022, by Alex Leigh.
- * All rights reserved.
- *
- * THIS IS UNPUBLISHED PROPRIETARY SOURCE CODE
- * The copyright notice above does not evidence any
- * actual or intended publication of such source code.
- */
-
-package lc.esp.sdk.schema.v18;
-
-import lc.eo.EO;
-
-/**
- * This is a dynamically generated DAO class for accessing objects within an ESP graph. This class
- * has been generated by DAOGenerator and should not be modified.
- *
- * @author DAOGenerator (synthetically generated class)
- */
-public final class Graph_txnElementDAO {
- public static final String API_TYPE = "lc.esp.v18.graph_txn";
-
- public static EO create() {
- EO eo = new EO(API_TYPE);
- return eo;
- }
-
- public static boolean assertType(final EO eo) {
- return eo.getType().equals(API_TYPE);
- }
-
- public static final String KEY_TXID = "txid";
- public static final String KEY_CHANGESET = "changeset";
-
- public static String apiType(final EO eo) {
- return eo.getType();
- }
-
-
- /* EO{typeName='lc.eo.schema.Attribute', id=08538584-26b7-4794-8f44-dcf9ec76a66e, data={eoType=txid, label=EOLoop{array=[EO{typeName='lc.eo.schema.Text', id=b4dd7c34-ac79-4a41-ac90-080d2d66a31e, data={text=Transaction ID, locale=en}, meta=null}]} , type=uuid}, meta=null} */
-
-
- public static java.util.UUID getTxid(EO eo) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- return eo.getValueUUID(KEY_TXID);
- }
-
- public static void setTxid(final EO eo, java.util.UUID value) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.setValue(KEY_TXID, value);
- }
-
- public static void setTxidLabel(final EO eo, final String label) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.setLabel(KEY_TXID, label);
- }
-
- public static void getLabelOrValueTxid(final EO eo, java.util.UUID value) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.getLabelOrValue(KEY_TXID);
- }
-
- public static void setIfUnsetTxid(final EO eo, java.util.UUID value) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.setValueIfUnset(KEY_TXID, value);
- }
-
-
- /* EO{typeName='lc.eo.schema.Attribute', id=91ff25bd-324b-4ecf-879e-56e9854c16b6, data={eoType=changeset, label=EOLoop{array=[EO{typeName='lc.eo.schema.Text', id=43d5a41f-b806-47bb-ac4a-9a5cf90cfe8b, data={text=Graph Deltas, locale=en}, meta=null}]} , type=loop}, meta=null} */
-
-
- public static lc.eo.EOLoop getChangeset(EO eo) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- return eo.getValueLoop(KEY_CHANGESET);
- }
-
- public static void setChangeset(final EO eo, lc.eo.EOLoop value) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.setValue(KEY_CHANGESET, value);
- }
-
- public static void setChangesetLabel(final EO eo, final String label) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.setLabel(KEY_CHANGESET, label);
- }
-
- public static void getLabelOrValueChangeset(final EO eo, lc.eo.EOLoop value) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.getLabelOrValue(KEY_CHANGESET);
- }
-
- public static void setIfUnsetChangeset(final EO eo, lc.eo.EOLoop value) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.setValueIfUnset(KEY_CHANGESET, value);
-}
-
-
-}
\ No newline at end of file
diff --git a/lc-esp-sdk/src/main/java/lc/esp/sdk/service/ChangeStreamer.java b/lc-esp-sdk/src/main/java/lc/esp/sdk/service/ChangeStreamer.java
deleted file mode 100644
index a3fef8c18d39aea66e388a78aceb5a55a615cb5b..0000000000000000000000000000000000000000
--- a/lc-esp-sdk/src/main/java/lc/esp/sdk/service/ChangeStreamer.java
+++ /dev/null
@@ -1,39 +0,0 @@
-package lc.esp.sdk.service;
-
-import lc.eo.EO;
-import lc.eo.EOLoop;
-import lc.eo.changestream.ChangeLogger;
-import lc.eo.changestream.schema.v1.Change_setElementDAO;
-import lc.esp.sdk.ESPAddress;
-import lc.esp.sdk.ESPMessage;
-import lc.esp.sdk.ESPProducer;
-import lc.mecha.log.MechaLogger;
-import lc.mecha.log.MechaLoggerFactory;
-
-import java.util.ArrayList;
-
-/**
- * This class implements a {@link ChangeLogger} which transmits changes to an ESP destination.
- *
- * @author Alex Leigh
- * @since mk1
- */
-public class ChangeStreamer implements ChangeLogger {
- private static final MechaLogger logger = MechaLoggerFactory.getLogger(ChangeStreamer.class);
- private final ESPProducer producer;
- private final ESPAddress destination;
-
- public ChangeStreamer(ESPProducer producer, ESPAddress destination) {
- this.producer = producer;
- this.destination = destination;
- }
-
- @Override
- public void comitted(long changeNumber, ArrayList<EO> changes) throws Exception {
- ESPMessage msg = new ESPMessage();
- EO changeSet = Change_setElementDAO.create();
- Change_setElementDAO.setChanges(changeSet, new EOLoop(changes));
- msg.setPayload(changeSet);
- logger.info("Sending change message: {}", msg);
- producer.send(destination, msg);
- }
-}
diff --git a/lc-esp-sdk/src/main/java/lc/esp/sdk/service/ESPService.java b/lc-esp-sdk/src/main/java/lc/esp/sdk/service/ESPService.java
deleted file mode 100644
index e7b2c2cae1df9d6e4b24d135e90db7b1afaac210..0000000000000000000000000000000000000000
--- a/lc-esp-sdk/src/main/java/lc/esp/sdk/service/ESPService.java
+++ /dev/null
@@ -1,58 +0,0 @@
-package lc.esp.sdk.service;
-
-import lc.esp.sdk.*;
-import lc.mecha.log.MechaLogger;
-import lc.mecha.log.MechaLoggerFactory;
-
-/**
- * This class implements a basic ESP request/response server.
- *
- * @author Alex Leigh
- * @since 18.1
- */
-public abstract class ESPService implements Runnable {
- private static final MechaLogger logger = MechaLoggerFactory.getLogger(ESPService.class);
- private final ESPSession session;
- private final ESPAddress address;
-
- public ESPService(ESPSession session, ESPAddress address) {
- this.session = session;
- this.address = address;
- }
-
- @Override
- public void run() {
- try {
- try (ESPConsumer consumer = session.createConsumer(address)) {
- //noinspection InfiniteLoopStatement
- while (true) {
- ESPMessage request = consumer.receive();
- ESPAddress replyAddress = request.getReplyTo();
- if (replyAddress != null) {
- try (ESPProducer producer = session.createProducer(replyAddress)) {
- try {
- ESPMessage reply = handle(request);
- if (reply != null) {
- // We force the correlation ID to match the request.
- String correlationId = request.getCorrelationId();
- if (correlationId != null) {
- reply.setCorrelationId(correlationId);
- }
- producer.send(reply);
- }
- } catch (Exception e) {
- e.printStackTrace();
- }
- }
- } else {
- logger.warn("Caller did not supply a return address: {}", request);
- }
- }
- }
- } catch (Exception e) {
- e.printStackTrace();
- }
- }
-
- public abstract ESPMessage handle(ESPMessage msg) throws Exception;
-}
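
A minimal subclass of the deleted ESPService that echoes each request; the base class handles the consume loop and copies the correlation ID onto the reply. The address values are hypothetical:

```java
import lc.esp.sdk.*;
import lc.esp.sdk.service.ESPService;
import lc.mecha.json.JSONObject;

public class EchoService extends ESPService {
    public EchoService(ESPSession session, ESPAddress address) {
        super(session, address);
    }

    @Override
    public ESPMessage handle(ESPMessage request) {
        // Reply with the request payload wrapped in a new message.
        return new ESPMessage(new JSONObject().put("echo", request.getPayload()));
    }

    public static void main(String[] args) throws Exception {
        try (ESPClient client = new ESPClient().start();
             ESPSession session = client.createSession()) {
            ESPAddress address = new ESPAddress("lc", "demo", "echo", "requests",
                    ESPAddressClass.QUEUE, ESPMessageClass.COMMAND);
            new Thread(new EchoService(session, address)).start();
            Thread.currentThread().join(); // keep running until the process is killed
        }
    }
}
```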
diff --git a/lc-esp-sdk/src/main/java/lc/esp/sdk/telemetry/TelemetryFrame.java b/lc-esp-sdk/src/main/java/lc/esp/sdk/telemetry/TelemetryFrame.java
deleted file mode 100644
index b944b76bae7b04e8241d5e30e98b82b4b5a60215..0000000000000000000000000000000000000000
--- a/lc-esp-sdk/src/main/java/lc/esp/sdk/telemetry/TelemetryFrame.java
+++ /dev/null
@@ -1,130 +0,0 @@
-package lc.esp.sdk.telemetry;
-
-import lc.esp.sdk.ESPMessage;
-import lc.mecha.json.JSONArray;
-import lc.mecha.json.JSONObject;
-import lc.mecha.log.MechaLogger;
-import lc.mecha.log.MechaLoggerFactory;
-
-import java.io.Serializable;
-import java.time.Instant;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-
-/**
- * A telemetry frame contains zero or more symbols which are grouped together into a single "reading".
- *
- * If symbols happen to be inter-related, for example, an X-Y-Z positional vector, then they are said to be of the
- * same reading if they are within the same frame. The frame constitutes a particular instant of time.
- *
- * @author Alex Leigh
- * @since mk18 (GIPSY DANGER)
- */
-public class TelemetryFrame {
- private static final MechaLogger logger = MechaLoggerFactory.getLogger(TelemetryFrame.class);
- public static final String KEY_TIME = "time";
- public static final String KEY_TIME_NS = "time_ns";
- public static final String KEY_SYMBOLS = "symbols";
- public static final String PREFIX_TLM = "esp.tlm.";
- private final HashSet<TelemetrySymbol> symbols = new HashSet<>();
- private final HashMap<String, Serializable> tags = new HashMap<>();
- private final Instant time;
-
- /**
- * Create a TelemetryFrame object from a {@link ESPMessage}.
- *
- * All ESP message parameters that begin with "esp.tlm." will be copied into the TelemetryFrame with the
- * "esp.tlm." prefix removed.
- *
- * @param msg The message.
- */
- public TelemetryFrame(ESPMessage msg) {
- JSONObject json = msg.getPayload();
-
- logger.trace("Payload: {}", json);
-
- if (json.has(KEY_TIME)) {
- time = Instant.ofEpochMilli(json.optLong(KEY_TIME));
- } else {
- // This is actually poor because we don't know how long
- // the thing has been sitting in the queue
- time = Instant.now();
- }
-
- JSONArray arr = json.getJSONArray(KEY_SYMBOLS);
- for (int i = 0; i < arr.length(); i++) {
- JSONObject symJson = arr.getJSONObject(i);
- symbols.add(new TelemetrySymbol(symJson));
- }
-
- for (Map.Entry<String, Serializable> param : msg.getParameters().entrySet()) {
- if (param.getKey().startsWith(PREFIX_TLM)) {
- String tagName = param.getKey().substring(PREFIX_TLM.length());
- tags.put(tagName, param.getValue());
- }
- }
- }
-
- public TelemetryFrame(Instant time) {
- this.time = time;
- }
-
- public Instant getTime() {
- return time;
- }
-
- public JSONObject toJson() {
- JSONObject frameJson = new JSONObject();
- frameJson.put(KEY_TIME, time.toEpochMilli());
- frameJson.put(KEY_TIME_NS, time.getNano());
- JSONArray symbolsArray = new JSONArray();
- for (TelemetrySymbol s : symbols) {
- symbolsArray.put(s.toJson());
- }
- if (symbolsArray.length() > 0) {
- frameJson.put(KEY_SYMBOLS, symbolsArray);
- }
- return frameJson;
- }
-
- /**
- * Convert this TelemetryFrame to an {@link ESPMessage} in the standard format. This will copy all of the
- * telemetry frame tags to the message parameters while prepending the keys with "esp.tlm.".
- */
- public ESPMessage toMessage() {
- ESPMessage msg = new ESPMessage();
- for (Map.Entry<String, Serializable> parameter : tags.entrySet()) {
- msg.getParameters().put(PREFIX_TLM + parameter.getKey(), parameter.getValue());
- }
- msg.setPayload(toJson());
- return msg;
- }
-
- public HashMap<String, Serializable> getTags() {
- return tags;
- }
-
- @Override
- public String toString() {
- return "TelemetryFrame{" +
- "symbols=" + symbols +
- ", tags=" + tags +
- ", time=" + time +
- '}';
- }
-
- /**
- * Return the symbols associated with this frame.
- */
- public Set<TelemetrySymbol> getSymbols() {
- return symbols;
- }
-
- public void addSymbol(TelemetrySymbol symbol) {
- // This works on the principle that equals() is implemented properly
- symbols.add(symbol);
- }
-
-}
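
Putting the deleted telemetry classes together (including the TelemetrySymbol removed below): one frame groups the symbols of a single reading, and toMessage() copies the frame tags into "esp.tlm."-prefixed message parameters. Symbol names and tag values are hypothetical:

```java
import lc.esp.sdk.ESPMessage;
import lc.esp.sdk.telemetry.TelemetryFrame;
import lc.esp.sdk.telemetry.TelemetrySymbol;

import java.time.Instant;

public class FrameExample {
    public static void main(String[] args) {
        TelemetryFrame frame = new TelemetryFrame(Instant.now());
        // One primitive symbol per vector component, tied to the same instant by the frame.
        frame.addSymbol(new TelemetrySymbol("pos.x", 1.25, true));
        frame.addSymbol(new TelemetrySymbol("pos.y", -0.50, true));
        frame.getTags().put("source", "sim"); // sent as parameter "esp.tlm.source"

        ESPMessage msg = frame.toMessage();
        System.out.println(msg.toJson());
    }
}
```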
diff --git a/lc-esp-sdk/src/main/java/lc/esp/sdk/telemetry/TelemetrySymbol.java b/lc-esp-sdk/src/main/java/lc/esp/sdk/telemetry/TelemetrySymbol.java
deleted file mode 100644
index 85ccc45425b8bb77c1ca16d7605f9d156b3abc5c..0000000000000000000000000000000000000000
--- a/lc-esp-sdk/src/main/java/lc/esp/sdk/telemetry/TelemetrySymbol.java
+++ /dev/null
@@ -1,131 +0,0 @@
-package lc.esp.sdk.telemetry;
-
-import lc.mecha.json.JSONObject;
-
-import java.io.Serializable;
-import java.util.Objects;
-
-/**
- * This class holds data about a particular telemetry symbol reading.
- *
- * Symbols are meant to be aggregated into {@link TelemetryFrame}s. The frame contains important meta data
- * about the symbol, including the time the symbol was valid for.
- *
- * When designing symbols, keep in mind that the value of the symbol should never be a set, array, or object. The
- * value should be primitive. In cases where a reading is a vector, each part should be split up into an independent
- * symbol, and then all those symbols can be represented in a frame to indicate they occurred at the
- * same moment/reading.
- *
- * @author Alex Leigh
- * @since mk18 (GIPSY DANGER)
- */
-public class TelemetrySymbol {
- public static final String KEY_NAME = "name";
- public static final String KEY_VALUE = "value";
- public static final String KEY_UOM = "uom";
- public static final String KEY_LOS = "los";
- public static final String KEY_PRIMARY = "primary";
- private final String name;
- private final Serializable value;
- private final String uom;
- private final boolean isLos;
- private final boolean isPrimary;
-
- public TelemetrySymbol(JSONObject obj) {
- name = obj.getString(KEY_NAME);
- value = (Serializable) obj.get(KEY_VALUE);
- uom = obj.optString(KEY_UOM, null);
- isLos = obj.getBoolean(KEY_LOS);
- isPrimary = obj.getBoolean(KEY_PRIMARY);
- }
-
- public TelemetrySymbol(String name, Serializable value, boolean isPrimary) {
- this.name = name;
- this.value = value;
- this.uom = null;
- this.isLos = false;
- this.isPrimary = isPrimary;
- }
-
- public TelemetrySymbol(String name, Serializable value, String uom, boolean isLos, boolean isPrimary) {
- this.name = name;
- this.value = value;
- this.uom = uom;
- this.isLos = isLos;
- this.isPrimary = isPrimary;
- }
-
- public JSONObject toJson() {
- JSONObject obj = new JSONObject();
- obj.put(KEY_NAME, name);
- obj.put(KEY_VALUE, value);
- obj.put(KEY_UOM, uom);
- obj.put(KEY_LOS, isLos);
- obj.put(KEY_PRIMARY, isPrimary);
- return obj;
- }
-
- @Override
- public String toString() {
- return "TelemetrySymbol{" +
- "name='" + name + '\'' +
- ", value=" + value +
- ", uom='" + uom + '\'' +
- ", isLos=" + isLos +
- '}';
- }
-
- /**
- * Returns true if the symbol is in the Loss Of Signal (LOS) state.
- *
- * Symbols can be independently in the LOS state; this indicates that the source for the symbol has become unlocked.
- * The symbol may or may not contain a value; if it does, that value indicates the last known reading on the symbol
- * before the LOS event occurred.
- */
- public boolean isLos() {
- return isLos;
- }
-
- /**
- * Return the unit-of-measure, if any, for this symbol.
- */
- public String getUom() {
- return uom;
- }
-
- /**
- * Return the name for this symbol.
- */
- public String getName() {
- return name;
- }
-
- /**
- * Return the value of this symbol.
- */
- public Serializable getValue() {
- return value;
- }
-
- /**
- * Returns true if this symbol value is primary. A value is considered to be primary if it is a direct
- * raw value such as that read directly from a sensor. A value is considered to be secondary if it is the product
- * of some calculation, including aggregations, sampling, or other formulas.
- */
- public boolean isPrimary() {
- return isPrimary;
- }
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- TelemetrySymbol that = (TelemetrySymbol) o;
- return Objects.equals(name, that.name);
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(name);
- }
-}
diff --git a/lc-esp-service-clock/LICENSE.md b/lc-esp-service-clock/LICENSE.md
deleted file mode 100644
index 692047911b05a4b04dea11e143db9fd219e63ce5..0000000000000000000000000000000000000000
--- a/lc-esp-service-clock/LICENSE.md
+++ /dev/null
@@ -1,490 +0,0 @@
-# GNU GENERAL PUBLIC LICENSE
-
-Version 3, 29 June 2007
-
-Copyright (C) 2007 [Free Software Foundation, Inc.](http://fsf.org/)
-
-Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed.
-
-## Preamble
-
-The GNU General Public License is a free, copyleft license for software and other kinds of works.
-
-The licenses for most software and other practical works are designed to take away your freedom to share and change the
-works. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change all
-versions of a program--to make sure it remains free software for all its users. We, the Free Software Foundation, use
-the GNU General Public License for most of our software; it applies also to any other work released this way by its
-authors. You can apply it to your programs, too.
-
-When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make
-sure that you have the freedom to distribute copies of free software (and charge for them if you wish), that you receive
-source code or can get it if you want it, that you can change the software or use pieces of it in new free programs, and
-that you know you can do these things.
-
-To protect your rights, we need to prevent others from denying you these rights or asking you to surrender the rights.
-Therefore, you have certain responsibilities if you distribute copies of the software, or if you modify it:
-responsibilities to respect the freedom of others.
-
-For example, if you distribute copies of such a program, whether gratis or for a fee, you must pass on to the recipients
-the same freedoms that you received. You must make sure that they, too, receive or can get the source code. And you must
-show them these terms so they know their rights.
-
-Developers that use the GNU GPL protect your rights with two steps:
-
-1. assert copyright on the software, and
-2. offer you this License giving you legal permission to copy, distribute and/or modify it.
-
-For the developers' and authors' protection, the GPL clearly explains that there is no warranty for this free software.
-For both users' and authors' sake, the GPL requires that modified versions be marked as changed, so that their problems
-will not be attributed erroneously to authors of previous versions.
-
-Some devices are designed to deny users access to install or run modified versions of the software inside them, although
-the manufacturer can do so. This is fundamentally incompatible with the aim of protecting users' freedom to change the
-software. The systematic pattern of such abuse occurs in the area of products for individuals to use, which is precisely
-where it is most unacceptable. Therefore, we have designed this version of the GPL to prohibit the practice for those
-products. If such problems arise substantially in other domains, we stand ready to extend this provision to those
-domains in future versions of the GPL, as needed to protect the freedom of users.
-
-Finally, every program is threatened constantly by software patents. States should not allow patents to restrict
-development and use of software on general-purpose computers, but in those that do, we wish to avoid the special danger
-that patents applied to a free program could make it effectively proprietary. To prevent this, the GPL assures that
-patents cannot be used to render the program non-free.
-
-The precise terms and conditions for copying, distribution and modification follow.
-
-## TERMS AND CONDITIONS
-
-### 0. Definitions.
-
-*This License* refers to version 3 of the GNU General Public License.
-
-*Copyright* also means copyright-like laws that apply to other kinds of works, such as semiconductor masks.
-
-*The Program* refers to any copyrightable work licensed under this License. Each licensee is addressed as *you*. *
-Licensees* and *recipients* may be individuals or organizations.
-
-To *modify* a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission,
-other than the making of an exact copy. The resulting work is called a *modified version* of the earlier work or a work
-*based on* the earlier work.
-
-A *covered work* means either the unmodified Program or a work based on the Program.
-
-To *propagate* a work means to do anything with it that, without permission, would make you directly or secondarily
-liable for infringement under applicable copyright law, except executing it on a computer or modifying a private copy.
-Propagation includes copying, distribution (with or without modification), making available to the public, and in some
-countries other activities as well.
-
-To *convey* a work means any kind of propagation that enables other parties to make or receive copies. Mere interaction
-with a user through a computer network, with no transfer of a copy, is not conveying.
-
-An interactive user interface displays *Appropriate Legal Notices* to the extent that it includes a convenient and
-prominently visible feature that
-
-1. displays an appropriate copyright notice, and
-2. tells the user that there is no warranty for the work (except to the extent that warranties are provided), that
- licensees may convey the work under this License, and how to view a copy of this License.
-
-If the interface presents a list of user commands or options, such as a menu, a prominent item in the list meets this
-criterion.
-
-### 1. Source Code.
-
-The *source code* for a work means the preferred form of the work for making modifications to it. *Object code* means
-any non-source form of a work.
-
-A *Standard Interface* means an interface that either is an official standard defined by a recognized standards body,
-or, in the case of interfaces specified for a particular programming language, one that is widely used among developers
-working in that language.
-
-The *System Libraries* of an executable work include anything, other than the work as a whole, that (a) is included in
-the normal form of packaging a Major Component, but which is not part of that Major Component, and (b) serves only to
-enable use of the work with that Major Component, or to implement a Standard Interface for which an implementation is
-available to the public in source code form. A *Major Component*, in this context, means a major essential component
-(kernel, window system, and so on) of the specific operating system (if any) on which the executable work runs, or a
-compiler used to produce the work, or an object code interpreter used to run it.
-
-The *Corresponding Source* for a work in object code form means all the source code needed to generate, install, and (
-for an executable work) run the object code and to modify the work, including scripts to control those activities.
-However, it does not include the work's System Libraries, or general-purpose tools or generally available free programs
-which are used unmodified in performing those activities but which are not part of the work. For example, Corresponding
-Source includes interface definition files associated with source files for the work, and the source code for shared
-libraries and dynamically linked subprograms that the work is specifically designed to require, such as by intimate data
-communication or control flow between those subprograms and other parts of the work.
-
-The Corresponding Source need not include anything that users can regenerate automatically from other parts of the
-Corresponding Source.
-
-The Corresponding Source for a work in source code form is that same work.
-
-### 2. Basic Permissions.
-
-All rights granted under this License are granted for the term of copyright on the Program, and are irrevocable provided
-the stated conditions are met. This License explicitly affirms your unlimited permission to run the unmodified Program.
-The output from running a covered work is covered by this License only if the output, given its content, constitutes a
-covered work. This License acknowledges your rights of fair use or other equivalent, as provided by copyright law.
-
-You may make, run and propagate covered works that you do not convey, without conditions so long as your license
-otherwise remains in force. You may convey covered works to others for the sole purpose of having them make
-modifications exclusively for you, or provide you with facilities for running those works, provided that you comply with
-the terms of this License in conveying all material for which you do not control copyright. Those thus making or running
-the covered works for you must do so exclusively on your behalf, under your direction and control, on terms that
-prohibit them from making any copies of your copyrighted material outside their relationship with you.
-
-Conveying under any other circumstances is permitted solely under the conditions stated below. Sublicensing is not
-allowed; section 10 makes it unnecessary.
-
-### 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
-
-No covered work shall be deemed part of an effective technological measure under any applicable law fulfilling
-obligations under article 11 of the WIPO copyright treaty adopted on 20 December 1996, or similar laws prohibiting or
-restricting circumvention of such measures.
-
-When you convey a covered work, you waive any legal power to forbid circumvention of technological measures to the
-extent such circumvention is effected by exercising rights under this License with respect to the covered work, and you
-disclaim any intention to limit operation or modification of the work as a means of enforcing, against the work's users,
-your or third parties' legal rights to forbid circumvention of technological measures.
-
-### 4. Conveying Verbatim Copies.
-
-You may convey verbatim copies of the Program's source code as you receive it, in any medium, provided that you
-conspicuously and appropriately publish on each copy an appropriate copyright notice; keep intact all notices stating
-that this License and any non-permissive terms added in accord with section 7 apply to the code; keep intact all notices
-of the absence of any warranty; and give all recipients a copy of this License along with the Program.
-
-You may charge any price or no price for each copy that you convey, and you may offer support or warranty protection for
-a fee.
-
-### 5. Conveying Modified Source Versions.
-
-You may convey a work based on the Program, or the modifications to produce it from the Program, in the form of source
-code under the terms of section 4, provided that you also meet all of these conditions:
-
-- a) The work must carry prominent notices stating that you modified it, and giving a relevant date.
-- b) The work must carry prominent notices stating that it is released under this License and any conditions added under
- section 7. This requirement modifies the requirement in section 4 to *keep intact all notices*.
-- c) You must license the entire work, as a whole, under this License to anyone who comes into possession of a copy.
- This License will therefore apply, along with any applicable section 7 additional terms, to the whole of the work, and
- all its parts, regardless of how they are packaged. This License gives no permission to license the work in any other
- way, but it does not invalidate such permission if you have separately received it.
-- d) If the work has interactive user interfaces, each must display Appropriate Legal Notices; however, if the Program
- has interactive interfaces that do not display Appropriate Legal Notices, your work need not make them do so.
-
-A compilation of a covered work with other separate and independent works, which are not by their nature extensions of
-the covered work, and which are not combined with it such as to form a larger program, in or on a volume of a storage or
-distribution medium, is called an *aggregate* if the compilation and its resulting copyright are not used to limit the
-access or legal rights of the compilation's users beyond what the individual works permit. Inclusion of a covered work
-in an aggregate does not cause this License to apply to the other parts of the aggregate.
-
-### 6. Conveying Non-Source Forms.
-
-You may convey a covered work in object code form under the terms of sections 4 and 5, provided that you also convey the
-machine-readable Corresponding Source under the terms of this License, in one of these ways:
-
-- a) Convey the object code in, or embodied in, a physical product (including a physical distribution medium),
- accompanied by the Corresponding Source fixed on a durable physical medium customarily used for software interchange.
-- b) Convey the object code in, or embodied in, a physical product (including a physical distribution medium),
- accompanied by a written offer, valid for at least three years and valid for as long as you offer spare parts or
- customer support for that product model, to give anyone who possesses the object code either
- 1. a copy of the Corresponding Source for all the software in the product that is covered by this License, on a
- durable physical medium customarily used for software interchange, for a price no more than your reasonable cost
- of physically performing this conveying of source, or
- 2. access to copy the Corresponding Source from a network server at no charge.
-- c) Convey individual copies of the object code with a copy of the written offer to provide the Corresponding Source.
- This alternative is allowed only occasionally and noncommercially, and only if you received the object code with such
- an offer, in accord with subsection 6b.
-- d) Convey the object code by offering access from a designated place
- (gratis or for a charge), and offer equivalent access to the Corresponding Source in the same way through the same
- place at no further charge. You need not require recipients to copy the Corresponding Source along with the object
- code. If the place to copy the object code is a network server, the Corresponding Source may be on a different server
- (operated by you or a third party) that supports equivalent copying facilities, provided you maintain clear directions
- next to the object code saying where to find the Corresponding Source. Regardless of what server hosts the
- Corresponding Source, you remain obligated to ensure that it is available for as long as needed to satisfy these
- requirements.
-- e) Convey the object code using peer-to-peer transmission, provided you inform other peers where the object code and
- Corresponding Source of the work are being offered to the general public at no charge under subsection 6d.
-
-A separable portion of the object code, whose source code is excluded from the Corresponding Source as a System Library,
-need not be included in conveying the object code work.
-
-A *User Product* is either
-
-1. a *consumer product*, which means any tangible personal property which is normally used for personal, family, or
- household purposes, or
-2. anything designed or sold for incorporation into a dwelling.
-
-In determining whether a product is a consumer product, doubtful cases shall be resolved in favor of coverage. For a
-particular product received by a particular user, *normally used* refers to a typical or common use of that class of
-product, regardless of the status of the particular user or of the way in which the particular user actually uses, or
-expects or is expected to use, the product. A product is a consumer product regardless of whether the product has
-substantial commercial, industrial or non-consumer uses, unless such uses represent the only significant mode of use of
-the product.
-
-*Installation Information* for a User Product means any methods, procedures, authorization keys, or other information
-required to install and execute modified versions of a covered work in that User Product from a modified version of its
-Corresponding Source. The information must suffice to ensure that the continued functioning of the modified object code
-is in no case prevented or interfered with solely because modification has been made.
-
-If you convey an object code work under this section in, or with, or specifically for use in, a User Product, and the
-conveying occurs as part of a transaction in which the right of possession and use of the User Product is transferred to
-the recipient in perpetuity or for a fixed term (regardless of how the transaction is characterized), the Corresponding
-Source conveyed under this section must be accompanied by the Installation Information. But this requirement does not
-apply if neither you nor any third party retains the ability to install modified object code on the User Product (for
-example, the work has been installed in ROM).
-
-The requirement to provide Installation Information does not include a requirement to continue to provide support
-service, warranty, or updates for a work that has been modified or installed by the recipient, or for the User Product
-in which it has been modified or installed. Access to a network may be denied when the modification itself materially
-and adversely affects the operation of the network or violates the rules and protocols for communication across the
-network.
-
-Corresponding Source conveyed, and Installation Information provided, in accord with this section must be in a format
-that is publicly documented (and with an implementation available to the public in source code form), and must require
-no special password or key for unpacking, reading or copying.
-
-### 7. Additional Terms.
-
-*Additional permissions* are terms that supplement the terms of this License by making exceptions from one or more of
-its conditions. Additional permissions that are applicable to the entire Program shall be treated as though they were
-included in this License, to the extent that they are valid under applicable law. If additional permissions apply only
-to part of the Program, that part may be used separately under those permissions, but the entire Program remains
-governed by this License without regard to the additional permissions.
-
-When you convey a copy of a covered work, you may at your option remove any additional permissions from that copy, or
-from any part of it. (Additional permissions may be written to require their own removal in certain cases when you
-modify the work.) You may place additional permissions on material, added by you to a covered work, for which you have
-or can give appropriate copyright permission.
-
-Notwithstanding any other provision of this License, for material you add to a covered work, you may (if authorized by
-the copyright holders of that material)
-supplement the terms of this License with terms:
-
-- a) Disclaiming warranty or limiting liability differently from the terms of sections 15 and 16 of this License; or
-- b) Requiring preservation of specified reasonable legal notices or author attributions in that material or in the
- Appropriate Legal Notices displayed by works containing it; or
-- c) Prohibiting misrepresentation of the origin of that material, or requiring that modified versions of such material
- be marked in reasonable ways as different from the original version; or
-- d) Limiting the use for publicity purposes of names of licensors or authors of the material; or
-- e) Declining to grant rights under trademark law for use of some trade names, trademarks, or service marks; or
-- f) Requiring indemnification of licensors and authors of that material by anyone who conveys the material (or modified
- versions of it) with contractual assumptions of liability to the recipient, for any liability that these contractual
- assumptions directly impose on those licensors and authors.
-
-All other non-permissive additional terms are considered *further restrictions*
-within the meaning of section 10. If the Program as you received it, or any part of it, contains a notice stating that
-it is governed by this License along with a term that is a further restriction, you may remove that term. If a license
-document contains a further restriction but permits relicensing or conveying under this License, you may add to a
-covered work material governed by the terms of that license document, provided that the further restriction does not
-survive such relicensing or conveying.
-
-If you add terms to a covered work in accord with this section, you must place, in the relevant source files, a
-statement of the additional terms that apply to those files, or a notice indicating where to find the applicable terms.
-
-Additional terms, permissive or non-permissive, may be stated in the form of a separately written license, or stated as
-exceptions; the above requirements apply either way.
-
-### 8. Termination.
-
-You may not propagate or modify a covered work except as expressly provided under this License. Any attempt otherwise to
-propagate or modify it is void, and will automatically terminate your rights under this License (including any patent
-licenses granted under the third paragraph of section 11).
-
-However, if you cease all violation of this License, then your license from a particular copyright holder is reinstated
-
-- a) provisionally, unless and until the copyright holder explicitly and finally terminates your license, and
-- b) permanently, if the copyright holder fails to notify you of the violation by some reasonable means prior to 60 days
- after the cessation.
-
-Moreover, your license from a particular copyright holder is reinstated permanently if the copyright holder notifies you
-of the violation by some reasonable means, this is the first time you have received notice of violation of this
-License (for any work) from that copyright holder, and you cure the violation prior to 30 days after your receipt of the
-notice.
-
-Termination of your rights under this section does not terminate the licenses of parties who have received copies or
-rights from you under this License. If your rights have been terminated and not permanently reinstated, you do not
-qualify to receive new licenses for the same material under section 10.
-
-### 9. Acceptance Not Required for Having Copies.
-
-You are not required to accept this License in order to receive or run a copy of the Program. Ancillary propagation of a
-covered work occurring solely as a consequence of using peer-to-peer transmission to receive a copy likewise does not
-require acceptance. However, nothing other than this License grants you permission to propagate or modify any covered
-work. These actions infringe copyright if you do not accept this License. Therefore, by modifying or propagating a
-covered work, you indicate your acceptance of this License to do so.
-
-### 10. Automatic Licensing of Downstream Recipients.
-
-Each time you convey a covered work, the recipient automatically receives a license from the original licensors, to run,
-modify and propagate that work, subject to this License. You are not responsible for enforcing compliance by third
-parties with this License.
-
-An *entity transaction* is a transaction transferring control of an organization, or substantially all assets of one, or
-subdividing an organization, or merging organizations. If propagation of a covered work results from an entity
-transaction, each party to that transaction who receives a copy of the work also receives whatever licenses to the work
-the party's predecessor in interest had or could give under the previous paragraph, plus a right to possession of the
-Corresponding Source of the work from the predecessor in interest, if the predecessor has it or can get it with
-reasonable efforts.
-
-You may not impose any further restrictions on the exercise of the rights granted or affirmed under this License. For
-example, you may not impose a license fee, royalty, or other charge for exercise of rights granted under this License,
-and you may not initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging that any patent
-claim is infringed by making, using, selling, offering for sale, or importing the Program or any portion of it.
-
-### 11. Patents.
-
-A *contributor* is a copyright holder who authorizes use under this License of the Program or a work on which the
-Program is based. The work thus licensed is called the contributor's *contributor version*.
-
-A contributor's *essential patent claims* are all patent claims owned or controlled by the contributor, whether already
-acquired or hereafter acquired, that would be infringed by some manner, permitted by this License, of making, using, or
-selling its contributor version, but do not include claims that would be infringed only as a consequence of further
-modification of the contributor version. For purposes of this definition, *control* includes the right to grant patent
-sublicenses in a manner consistent with the requirements of this License.
-
-Each contributor grants you a non-exclusive, worldwide, royalty-free patent license under the contributor's essential
-patent claims, to make, use, sell, offer for sale, import and otherwise run, modify and propagate the contents of its
-contributor version.
-
-In the following three paragraphs, a *patent license* is any express agreement or commitment, however denominated, not
-to enforce a patent (such as an express permission to practice a patent or covenant not to sue for patent infringement).
-To *grant* such a patent license to a party means to make such an agreement or commitment not to enforce a patent
-against the party.
-
-If you convey a covered work, knowingly relying on a patent license, and the Corresponding Source of the work is not
-available for anyone to copy, free of charge and under the terms of this License, through a publicly available network
-server or other readily accessible means, then you must either
-
-1. cause the Corresponding Source to be so available, or
-2. arrange to deprive yourself of the benefit of the patent license for this particular work, or
-3. arrange, in a manner consistent with the requirements of this License, to extend the patent license to downstream
- recipients.
-
-*Knowingly relying* means you have actual knowledge that, but for the patent license, your conveying the covered work in
-a country, or your recipient's use of the covered work in a country, would infringe one or more identifiable patents in
-that country that you have reason to believe are valid.
-
-If, pursuant to or in connection with a single transaction or arrangement, you convey, or propagate by procuring
-conveyance of, a covered work, and grant a patent license to some of the parties receiving the covered work authorizing
-them to use, propagate, modify or convey a specific copy of the covered work, then the patent license you grant is
-automatically extended to all recipients of the covered work and works based on it.
-
-A patent license is *discriminatory* if it does not include within the scope of its coverage, prohibits the exercise of,
-or is conditioned on the non-exercise of one or more of the rights that are specifically granted under this License. You
-may not convey a covered work if you are a party to an arrangement with a third party that is in the business of
-distributing software, under which you make payment to the third party based on the extent of your activity of conveying
-the work, and under which the third party grants, to any of the parties who would receive the covered work from you, a
-discriminatory patent license
-
-- a) in connection with copies of the covered work conveyed by you (or copies made from those copies), or
-- b) primarily for and in connection with specific products or compilations that contain the covered work, unless you
- entered into that arrangement, or that patent license was granted, prior to 28 March 2007.
-
-Nothing in this License shall be construed as excluding or limiting any implied license or other defenses to
-infringement that may otherwise be available to you under applicable patent law.
-
-### 12. No Surrender of Others' Freedom.
-
-If conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this
-License, they do not excuse you from the conditions of this License. If you cannot convey a covered work so as to
-satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence
-you may not convey it at all. For example, if you agree to terms that obligate you to collect a royalty for further
-conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License
-would be to refrain entirely from conveying the Program.
-
-### 13. Use with the GNU Affero General Public License.
-
-Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work
-licensed under version 3 of the GNU Affero General Public License into a single combined work, and to convey the
-resulting work. The terms of this License will continue to apply to the part which is the covered work, but the special
-requirements of the GNU Affero General Public License, section 13, concerning interaction through a network will apply
-to the combination as such.
-
-### 14. Revised Versions of this License.
-
-The Free Software Foundation may publish revised and/or new versions of the GNU General Public License from time to
-time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new
-problems or concerns.
-
-Each version is given a distinguishing version number. If the Program specifies that a certain numbered version of the
-GNU General Public License *or any later version* applies to it, you have the option of following the terms and
-conditions either of that numbered version or of any later version published by the Free Software Foundation. If the
-Program does not specify a version number of the GNU General Public License, you may choose any version ever published
-by the Free Software Foundation.
-
-If the Program specifies that a proxy can decide which future versions of the GNU General Public License can be used,
-that proxy's public statement of acceptance of a version permanently authorizes you to choose that version for the
-Program.
-
-Later license versions may give you additional or different permissions. However, no additional obligations are imposed
-on any author or copyright holder as a result of your choosing to follow a later version.
-
-### 15. Disclaimer of Warranty.
-
-THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING
-THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM *AS IS* WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR
-IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
-THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU
-ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
-
-### 16. Limitation of Liability.
-
-IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO
-MODIFIES AND/OR CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL,
-INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO
-LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM
-TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
-DAMAGES.
-
-### 17. Interpretation of Sections 15 and 16.
-
-If the disclaimer of warranty and limitation of liability provided above cannot be given local legal effect according to
-their terms, reviewing courts shall apply local law that most closely approximates an absolute waiver of all civil
-liability in connection with the Program, unless a warranty or assumption of liability accompanies a copy of the Program
-in return for a fee.
-
-## END OF TERMS AND CONDITIONS
-
-### How to Apply These Terms to Your New Programs
-
-If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve
-this is to make it free software which everyone can redistribute and change under these terms.
-
-To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to
-most effectively state the exclusion of warranty; and each file should have at least the *copyright* line and a pointer
-to where the full notice is found.
-
-
- Copyright (C)
-
- This program is free software: you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation, either version 3 of the License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License
- along with this program. If not, see <http://www.gnu.org/licenses/>.
-
-Also add information on how to contact you by electronic and paper mail.
-
-If the program does terminal interaction, make it output a short notice like this when it starts in an interactive mode:
-
- Copyright (C)
- This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
- This is free software, and you are welcome to redistribute it
- under certain conditions; type `show c' for details.
-
-The hypothetical commands `show w` and `show c` should show the appropriate parts of the General Public License. Of
-course, your program's commands might be different; for a GUI interface, you would use an *about box*.
-
-You should also get your employer (if you work as a programmer) or school, if any, to sign a *copyright disclaimer* for
-the program, if necessary. For more information on this, and how to apply and follow the GNU GPL, see
-[http://www.gnu.org/licenses/](http://www.gnu.org/licenses/).
-
-The GNU General Public License does not permit incorporating your program into proprietary programs. If your program is
-a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If
-this is what you want to do, use the GNU Lesser General Public License instead of this License. But first, please read
-[http://www.gnu.org/philosophy/why-not-lgpl.html](http://www.gnu.org/philosophy/why-not-lgpl.html).
diff --git a/lc-esp-service-clock/build.gradle b/lc-esp-service-clock/build.gradle
deleted file mode 100644
index 8369e31880eb0bd6f377130f8aba2ecee3828fa4..0000000000000000000000000000000000000000
--- a/lc-esp-service-clock/build.gradle
+++ /dev/null
@@ -1,21 +0,0 @@
-plugins {
- id 'java'
- id 'application'
-}
-
-group 'leighco'
-version '18.0'
-
-repositories {
- mavenCentral()
-}
-
-dependencies {
- implementation project(':lc-esp-sdk')
- testImplementation 'org.junit.jupiter:junit-jupiter-api:5.7.0'
- testRuntimeOnly 'org.junit.jupiter:junit-jupiter-engine:5.7.0'
-}
-
-test {
- useJUnitPlatform()
-}
\ No newline at end of file
diff --git a/lc-esp-service-clock/src/main/java/lc/esp/service/clock/ClockService.java b/lc-esp-service-clock/src/main/java/lc/esp/service/clock/ClockService.java
deleted file mode 100644
index f1e2bd3319dfe3cb1d450403dc31e03f6645c3d9..0000000000000000000000000000000000000000
--- a/lc-esp-service-clock/src/main/java/lc/esp/service/clock/ClockService.java
+++ /dev/null
@@ -1,51 +0,0 @@
-package lc.esp.service.clock;
-
-import lc.esp.sdk.ESPClient;
-import lc.mecha.log.MechaLogger;
-import lc.mecha.log.MechaLoggerFactory;
-import lc.mecha.util.BasicallyDangerous;
-
-import javax.jms.*;
-import java.security.KeyManagementException;
-import java.security.KeyStoreException;
-import java.security.NoSuchAlgorithmException;
-import java.time.Instant;
-
-/**
- * This is a trivial implementation of an ESP service which provides the current time. This is primarily useful
- * for dev/test.
- *
- * @author Alex Leigh
- * @since mk17 (GIPSY DANGER)
- */
-public class ClockService extends BasicallyDangerous {
- private static final MechaLogger logger = MechaLoggerFactory.getLogger(ClockService.class);
- private final ESPClient esp = new ESPClient();
-
- public ClockService() throws KeyStoreException, NoSuchAlgorithmException, KeyManagementException {
- }
-
- public static void main(String[] args) throws KeyStoreException, NoSuchAlgorithmException, KeyManagementException {
- new ClockService().run();
- }
-
- @Override
- public void runDangerously() throws Exception {
- // Unfortunately, ActiveMQ does not support AutoCloseable
- esp.start();
- Connection connection = esp.getConnectionFactory().createConnection();
- connection.start();
- Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
- Destination destination = session.createTopic("CLOCK");
- MessageProducer producer = session.createProducer(destination);
- producer.setDeliveryMode(DeliveryMode.NON_PERSISTENT);
-
- //noinspection InfiniteLoopStatement
- while (true) {
- TextMessage msg = session.createTextMessage(Instant.now().toString());
- producer.send(msg);
- logger.info("Sent: {}", msg);
- Thread.sleep(1000);
- }
- }
-}
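
For context on how the output of the deleted ClockService above could be consumed, here is a minimal plain-JMS subscriber sketch. It is an illustration only: the broker URL `tcp://localhost:61616` is an assumption (in this repository the connection is normally obtained through `ESPClient` rather than a raw connection factory), and only standard `javax.jms` and ActiveMQ client calls are used.

```java
import javax.jms.Connection;
import javax.jms.Destination;
import javax.jms.MessageConsumer;
import javax.jms.Session;
import javax.jms.TextMessage;

import org.apache.activemq.ActiveMQConnectionFactory;

public class ClockSubscriberSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical broker URL; the real connection details in this repository
        // are encapsulated by ESPClient rather than configured here.
        ActiveMQConnectionFactory factory =
                new ActiveMQConnectionFactory("tcp://localhost:61616");
        Connection connection = factory.createConnection();
        connection.start();
        Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
        Destination destination = session.createTopic("CLOCK");
        MessageConsumer consumer = session.createConsumer(destination);

        //noinspection InfiniteLoopStatement
        while (true) {
            TextMessage msg = (TextMessage) consumer.receive();
            System.out.println("Clock tick: " + msg.getText());
        }
    }
}
```

ClockService publishes one `TextMessage` per second on the `CLOCK` topic, so a subscriber like this simply prints one ISO-8601 timestamp per second.
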
diff --git a/lc-esp-test-sdk/build.gradle b/lc-esp-test-sdk/build.gradle
deleted file mode 100644
index 28f8d7381c1580911886d46c59ee2b4d71df19fe..0000000000000000000000000000000000000000
--- a/lc-esp-test-sdk/build.gradle
+++ /dev/null
@@ -1,25 +0,0 @@
-plugins {
- id 'java'
- id 'java-library'
-}
-
-group 'leighco'
-version '1.0'
-
-sourceCompatibility = JavaVersion.VERSION_17
-targetCompatibility = JavaVersion.VERSION_17
-
-repositories {
- mavenCentral()
-}
-
-dependencies {
- api project(':lc-esp-sdk')
- api project(':lc-eo-schema')
- testImplementation 'org.junit.jupiter:junit-jupiter-api:5.8.1'
- testRuntimeOnly 'org.junit.jupiter:junit-jupiter-engine:5.8.1'
-}
-
-test {
- useJUnitPlatform()
-}
\ No newline at end of file
diff --git a/lc-esp-test-sdk/src/main/java/lc/esp/test/schema/v1/BeaconElementDAO.java b/lc-esp-test-sdk/src/main/java/lc/esp/test/schema/v1/BeaconElementDAO.java
deleted file mode 100644
index ed155be6145b36496dfc07f064def8c0a836f30b..0000000000000000000000000000000000000000
--- a/lc-esp-test-sdk/src/main/java/lc/esp/test/schema/v1/BeaconElementDAO.java
+++ /dev/null
@@ -1,108 +0,0 @@
-/*
- * Copyright (c) 2014-2022, by Alex Leigh.
- * All rights reserved.
- *
- * THIS IS UNPUBLISHED PROPRIETARY SOURCE CODE
- * The copyright notice above does not evidence any
- * actual or intended publication of such source code.
- */
-
-package lc.esp.test.schema.v1;
-
-import lc.eo.EO;
-
-/**
- * This is a dynamically generated DAO class for accessing objects within an ESP graph. This class
- * has been generated by DAOGenerator and should not be modified.
- *
- * @author DAOGenerator (synthetically generated class)
- */
-public final class BeaconElementDAO {
- public static final String API_TYPE = "lc.esp.test.v1.beacon";
-
- public static EO create() {
- EO eo = new EO(API_TYPE);
- return eo;
- }
-
- public static boolean assertType(final EO eo) {
- return eo.getType().equals(API_TYPE);
- }
-
- public static final String KEY_ID = "id";
- public static final String KEY_TIME = "time";
-
- public static String apiType(final EO eo) {
- return eo.getType();
- }
-
-
- /* EO{typeName='lc.eo.schema.Attribute', id=64389e83-225b-4667-9696-5549f910c5df, data={eoType=id, label=EOLoop{array=[EO{typeName='lc.eo.schema.Text', id=a79b11d7-23a5-4829-be8a-72b89a83247e, data={text=id, locale=en}, meta=null}]} , type=uuid}, meta=null} */
-
-
- public static java.util.UUID getId(EO eo) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- return eo.getValueUUID(KEY_ID);
- }
-
- public static void setId(final EO eo, java.util.UUID value) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.setValue(KEY_ID, value);
- }
-
- public static void setIdLabel(final EO eo, final String label) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.setLabel(KEY_ID, label);
- }
-
- public static void getLabelOrValueId(final EO eo, java.util.UUID value) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.getLabelOrValue(KEY_ID);
- }
-
- public static void setIfUnsetId(final EO eo, java.util.UUID value) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.setValueIfUnset(KEY_ID, value);
- }
-
-
- /* EO{typeName='lc.eo.schema.Attribute', id=6fd508b3-c5eb-4a1c-aba7-ececad2137b7, data={eoType=time, label=EOLoop{array=[EO{typeName='lc.eo.schema.Text', id=d8d58a42-9ca8-40a3-ac3d-3c41af39127b, data={text=time, locale=en}, meta=null}]} , type=instant}, meta=null} */
-
-
- public static java.time.Instant getTime(EO eo) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- return eo.getValueInstant(KEY_TIME);
- }
-
- public static void setTime(final EO eo, java.time.Instant value) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.setValue(KEY_TIME, value);
- }
-
- public static void setTimeLabel(final EO eo, final String label) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.setLabel(KEY_TIME, label);
- }
-
- public static void getLabelOrValueTime(final EO eo, java.time.Instant value) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.getLabelOrValue(KEY_TIME);
- }
-
- public static void setIfUnsetTime(final EO eo, java.time.Instant value) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.setValueIfUnset(KEY_TIME, value);
-}
-
-
-}
\ No newline at end of file
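
The generated DAO above acts as a static, type-checked facade over an `EO` instance. As a quick usage illustration (a sketch only, assuming `lc.eo.EO` and the generated class are on the classpath; every call below appears in the DAO itself):

```java
import java.time.Instant;
import java.util.UUID;

import lc.eo.EO;
import lc.esp.test.schema.v1.BeaconElementDAO;

public class BeaconDaoUsageSketch {
    public static void main(String[] args) {
        // create() returns an EO already typed as lc.esp.test.v1.beacon.
        EO beacon = BeaconElementDAO.create();
        BeaconElementDAO.setId(beacon, UUID.randomUUID());
        BeaconElementDAO.setTime(beacon, Instant.now());

        // Every accessor re-checks the EO type and throws IllegalArgumentException on a mismatch.
        UUID id = BeaconElementDAO.getId(beacon);
        Instant time = BeaconElementDAO.getTime(beacon);
        System.out.println(id + " @ " + time);
    }
}
```
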
diff --git a/lc-esp-test-sdk/src/main/java/lc/esp/test/schema/v1/Greeting_requestElementDAO.java b/lc-esp-test-sdk/src/main/java/lc/esp/test/schema/v1/Greeting_requestElementDAO.java
deleted file mode 100644
index 8e62c3be2c501544c3791e5e6e17be822e437ff7..0000000000000000000000000000000000000000
--- a/lc-esp-test-sdk/src/main/java/lc/esp/test/schema/v1/Greeting_requestElementDAO.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
- * Copyright (c) 2014-2022, by Alex Leigh.
- * All rights reserved.
- *
- * THIS IS UNPUBLISHED PROPRIETARY SOURCE CODE
- * The copyright notice above does not evidence any
- * actual or intended publication of such source code.
- */
-
-package lc.esp.test.schema.v1;
-
-import lc.eo.EO;
-
-/**
- * This is a dynamically generated DAO class for accessing objects within an ESP graph. This class
- * has been generated by DAOGenerator and should not be modified.
- *
- * @author DAOGenerator (synthetically generated class)
- */
-public final class Greeting_requestElementDAO {
- public static final String API_TYPE = "lc.esp.test.v1.greeting_request";
-
- public static EO create() {
- EO eo = new EO(API_TYPE);
- return eo;
- }
-
- public static boolean assertType(final EO eo) {
- return eo.getType().equals(API_TYPE);
- }
-
- public static final String KEY_NAME = "name";
-
- public static String apiType(final EO eo) {
- return eo.getType();
- }
-
-
- /* EO{typeName='lc.eo.schema.Attribute', id=a599db98-7da7-40a1-84a5-149d3c251908, data={eoType=name, label=EOLoop{array=[EO{typeName='lc.eo.schema.Text', id=b0047a64-f1c8-4f09-a3a5-1b36e8a2d1c5, data={text=Name, locale=en}, meta=null}]} , type=string}, meta=null} */
-
-
- public static java.lang.String getName(EO eo) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- return eo.getValueString(KEY_NAME);
- }
-
- public static void setName(final EO eo, java.lang.String value) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.setValue(KEY_NAME, value);
- }
-
- public static void setNameLabel(final EO eo, final String label) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.setLabel(KEY_NAME, label);
- }
-
- public static void getLabelOrValueName(final EO eo, java.lang.String value) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.getLabelOrValue(KEY_NAME);
- }
-
- public static void setIfUnsetName(final EO eo, java.lang.String value) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.setValueIfUnset(KEY_NAME, value);
-}
-
-
-}
\ No newline at end of file
diff --git a/lc-esp-test-sdk/src/main/java/lc/esp/test/schema/v1/Greeting_responseElementDAO.java b/lc-esp-test-sdk/src/main/java/lc/esp/test/schema/v1/Greeting_responseElementDAO.java
deleted file mode 100644
index b2ad12c3eb8dbdd720a1400ff7bc6e547111b31f..0000000000000000000000000000000000000000
--- a/lc-esp-test-sdk/src/main/java/lc/esp/test/schema/v1/Greeting_responseElementDAO.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
- * Copyright (c) 2014-2022, by Alex Leigh.
- * All rights reserved.
- *
- * THIS IS UNPUBLISHED PROPRIETARY SOURCE CODE
- * The copyright notice above does not evidence any
- * actual or intended publication of such source code.
- */
-
-package lc.esp.test.schema.v1;
-
-import lc.eo.EO;
-
-/**
- * This is a dynamically generated DAO class for accessing objects within an ESP graph. This class
- * has been generated by DAOGenerator and should not be modified.
- *
- * @author DAOGenerator (synthetically generated class)
- */
-public final class Greeting_responseElementDAO {
- public static final String API_TYPE = "lc.esp.test.v1.greeting_response";
-
- public static EO create() {
- EO eo = new EO(API_TYPE);
- return eo;
- }
-
- public static boolean assertType(final EO eo) {
- return eo.getType().equals(API_TYPE);
- }
-
- public static final String KEY_MESSAGE = "message";
-
- public static String apiType(final EO eo) {
- return eo.getType();
- }
-
-
- /* EO{typeName='lc.eo.schema.Attribute', id=510987cc-038c-48b1-b89d-44a9a48a4997, data={eoType=message, label=EOLoop{array=[EO{typeName='lc.eo.schema.Text', id=8b7a2be8-3194-4f1b-ae05-940ad471fea2, data={text=Message, locale=en}, meta=null}]} , type=string}, meta=null} */
-
-
- public static java.lang.String getMessage(EO eo) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- return eo.getValueString(KEY_MESSAGE);
- }
-
- public static void setMessage(final EO eo, java.lang.String value) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.setValue(KEY_MESSAGE, value);
- }
-
- public static void setMessageLabel(final EO eo, final String label) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.setLabel(KEY_MESSAGE, label);
- }
-
- public static void getLabelOrValueMessage(final EO eo, java.lang.String value) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.getLabelOrValue(KEY_MESSAGE);
- }
-
- public static void setIfUnsetMessage(final EO eo, java.lang.String value) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.setValueIfUnset(KEY_MESSAGE, value);
-}
-
-
-}
\ No newline at end of file
diff --git a/lc-esp-test-sdk/src/main/java/lc/esp/test/sdk/SchemaGenerator.java b/lc-esp-test-sdk/src/main/java/lc/esp/test/sdk/SchemaGenerator.java
deleted file mode 100644
index 6fc43c7dac4ad2bcee4e490304bf80fab2b10660..0000000000000000000000000000000000000000
--- a/lc-esp-test-sdk/src/main/java/lc/esp/test/sdk/SchemaGenerator.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Copyright (c) 2004-2017, by Alex Leigh.
- * All rights reserved.
- *
- * THIS IS UNPUBLISHED PROPRIETARY SOURCE CODE
- * The copyright notice above does not evidence any
- * actual or intended publication of such source code.
- */
-
-package lc.esp.test.sdk;
-
-import lc.eo.EO;
-import lc.eo.EODataType;
-import lc.eo.schema.DAOGenerator;
-import lc.eo.schema.ElementElementDAO;
-import lc.eo.schema.SchemaElementDAO;
-import lc.eo.schema.util.AttributeUtil;
-import lc.mecha.log.MechaLogger;
-import lc.mecha.log.MechaLoggerFactory;
-import lc.mecha.util.UniversalJob;
-
-import java.io.File;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.io.PrintWriter;
-
-/**
- * Generate the schema for lc-eo-changestream.
- *
- * @author Alex Leigh
- * @since mk1
- */
-public final class SchemaGenerator {
- private static final MechaLogger logger = MechaLoggerFactory.getLogger(SchemaGenerator.class);
- private static final String BASE_PKG = "lc.esp.test.v1";
-
- // Observe the procedures of a general alert
-
- public static EO generate() {
- final EO schema = lc.eo.schema.util.SchemaUtil.create(BASE_PKG + ".schema");
-
- final EO loopSet = ElementElementDAO.create();
- SchemaElementDAO.getElements(schema).add(loopSet);
- ElementElementDAO.setEoType(loopSet, BASE_PKG + ".beacon");
- ElementElementDAO.getAttributes(loopSet).add(AttributeUtil.create("id", "id", EODataType.uuid));
- ElementElementDAO.getAttributes(loopSet).add(AttributeUtil.create("time", "time", EODataType.instant));
-
- final EO helloRequest = ElementElementDAO.create();
- SchemaElementDAO.getElements(schema).add(helloRequest);
- ElementElementDAO.setEoType(helloRequest, BASE_PKG + ".greeting_request");
- ElementElementDAO.getAttributes(helloRequest).add(AttributeUtil.create("name", "Name"));
-
- final EO helloResponse = ElementElementDAO.create();
- SchemaElementDAO.getElements(schema).add(helloResponse);
- ElementElementDAO.setEoType(helloResponse, BASE_PKG + ".greeting_response");
- ElementElementDAO.getAttributes(helloResponse).add(AttributeUtil.create("message", "Message"));
-
- return schema;
- }
-
- public static void main(final String... args) throws IOException {
- UniversalJob.banner(logger, "Apotheosis mk3", "2014-2022 Alex Leigh");
- final EO schema = generate();
- final DAOGenerator gen = new DAOGenerator(new File("lc-esp-test-sdk/src/main/java"));
- gen.generateJava("lc.esp.test.schema.v1", schema);
- try (PrintWriter pw = new PrintWriter(new FileWriter(new File("out.js")))) {
- gen.generateEcma5(pw, "lc.esp.test.v1", schema);
- }
- logger.info("Generated schema: {}", schema);
- }
-}
diff --git a/lc-esp-test-svc/build.gradle b/lc-esp-test-svc/build.gradle
deleted file mode 100644
index bf1f61728e6b9e86335e86a6c72e628b5aa68dd6..0000000000000000000000000000000000000000
--- a/lc-esp-test-svc/build.gradle
+++ /dev/null
@@ -1,23 +0,0 @@
-plugins {
- id 'java'
-}
-
-group 'leighco'
-version '1.0'
-
-repositories {
- mavenCentral()
-}
-
-sourceCompatibility = JavaVersion.VERSION_17
-targetCompatibility = JavaVersion.VERSION_17
-
-dependencies {
- implementation project(':lc-esp-test-sdk')
- testImplementation 'org.junit.jupiter:junit-jupiter-api:5.8.1'
- testRuntimeOnly 'org.junit.jupiter:junit-jupiter-engine:5.8.1'
-}
-
-test {
- useJUnitPlatform()
-}
\ No newline at end of file
diff --git a/lc-esp-test-svc/src/main/java/lc/esp/test/svc/BeaconEndpoint.java b/lc-esp-test-svc/src/main/java/lc/esp/test/svc/BeaconEndpoint.java
deleted file mode 100644
index d1b404ce2e3ed1addcab4195cb5a6c9588513ea9..0000000000000000000000000000000000000000
--- a/lc-esp-test-svc/src/main/java/lc/esp/test/svc/BeaconEndpoint.java
+++ /dev/null
@@ -1,59 +0,0 @@
-package lc.esp.test.svc;
-
-import lc.eo.EO;
-import lc.esp.sdk.ESPAddress;
-import lc.esp.sdk.ESPMessage;
-import lc.esp.sdk.ESPProducer;
-import lc.esp.sdk.ESPSession;
-import lc.esp.test.schema.v1.BeaconElementDAO;
-import lc.mecha.log.MechaLogger;
-import lc.mecha.log.MechaLoggerFactory;
-
-import javax.jms.JMSException;
-import java.time.Instant;
-import java.util.UUID;
-
-/**
- * This test service demonstrates the use of a topic. It sends out a beacon message once per second as defined
- * by the test schema.
- *
- * @author Alex Leigh
- * @since mk1
- */
-public class BeaconEndpoint implements Runnable {
- private static final MechaLogger logger = MechaLoggerFactory.getLogger(BeaconEndpoint.class);
- private final ESPProducer producer;
- private final ESPAddress destination;
-
- public BeaconEndpoint(ESPSession session, ESPAddress destination) throws JMSException {
- this.producer = session.createProducer();
- this.destination = destination;
- }
-
- @Override
- public void run() {
-
- // public ESPAddress(String org, String domain, String service, String name,
- // ESPAddressClass serviceClass, ESPMessageClass destClass) {
-
- logger.info("Starting beacon...");
-
- while (true) {
- try {
- EO msg = BeaconElementDAO.create();
- BeaconElementDAO.setId(msg, UUID.randomUUID());
- BeaconElementDAO.setTime(msg, Instant.now());
- logger.debug("Sending msg: {}", msg);
- producer.send(destination, new ESPMessage(msg));
- } catch (Exception e) {
- e.printStackTrace();
- }
-
- try {
- Thread.sleep(1000);
- } catch (InterruptedException e) {
- throw new RuntimeException(e);
- }
- }
- }
-}
diff --git a/lc-esp-test-svc/src/main/java/lc/esp/test/svc/GraphStreamEndpoint.java b/lc-esp-test-svc/src/main/java/lc/esp/test/svc/GraphStreamEndpoint.java
deleted file mode 100644
index 721d59b131ac7b45ba6ffccb01a687298df7de94..0000000000000000000000000000000000000000
--- a/lc-esp-test-svc/src/main/java/lc/esp/test/svc/GraphStreamEndpoint.java
+++ /dev/null
@@ -1,110 +0,0 @@
-package lc.esp.test.svc;
-
-import lc.eo.EO;
-import lc.eo.EOLoop;
-import lc.eo.changestream.ChangeLogger;
-import lc.eo.changestream.EditContext;
-import lc.esp.sdk.ESPAddress;
-import lc.esp.sdk.ESPMessage;
-import lc.esp.sdk.ESPProducer;
-import lc.esp.sdk.ESPSession;
-import lc.esp.sdk.schema.v18.Graph_state_responseElementDAO;
-import lc.esp.sdk.schema.v18.Graph_txnElementDAO;
-import lc.esp.sdk.service.ESPService;
-import lc.mecha.log.MechaLogger;
-import lc.mecha.log.MechaLoggerFactory;
-
-import javax.jms.JMSException;
-import java.time.Instant;
-import java.util.ArrayList;
-import java.util.concurrent.locks.Lock;
-import java.util.concurrent.locks.ReadWriteLock;
-import java.util.concurrent.locks.ReentrantReadWriteLock;
-
-/**
- * Reference implementation of a change stream service. This service monotonically provides a changing EO graph
- * and provides updates to clients.
- *
- * @author Alex Leigh
- * @since mk1
- */
-public class GraphStreamEndpoint extends ESPService implements Runnable, ChangeLogger {
- private static final MechaLogger logger = MechaLoggerFactory.getLogger(GraphStreamEndpoint.class);
- private final ESPProducer producer;
- private final ESPAddress streamAddress;
- private final EO graph = new EO();
- private final EditContext ctx = new EditContext(graph, this);
-
- private final ReadWriteLock readWriteLock = new ReentrantReadWriteLock();
- private final Lock readLock = readWriteLock.readLock();
- private final Lock writeLock = readWriteLock.writeLock();
-
- private long currentTxnId;
-
- public GraphStreamEndpoint(ESPSession session, ESPAddress streamAddress, ESPAddress cmdAddress) throws JMSException {
- super(session, cmdAddress);
- this.producer = session.createProducer();
- this.streamAddress = streamAddress;
- }
-
- @Override
- public void run() {
- new Thread(new Runnable() {
- @Override
- public void run() {
- while (true) {
- try {
- writeLock.lock();
- logger.info("Mutating graph... {}", graph);
- graph.setValue("time", Instant.now());
- try {
- // Note that we'll catch comitted() in our lock
- ctx.commit();
- } catch (Exception e) {
- throw new RuntimeException(e);
- }
- } finally {
- writeLock.unlock();
- }
- try {
- Thread.sleep(1000);
- } catch (InterruptedException e) {
- throw new RuntimeException(e);
- }
- }
- }
- }).start();
-
- super.run();
- }
-
- @Override
- public void comitted(long changeNumber, ArrayList changeSet) throws Exception {
- logger.info("Received commit. [txnid: {}] [changeset: {}]", changeNumber, changeSet);
- EO txn = Graph_txnElementDAO.create();
- txn.setValue(Graph_txnElementDAO.KEY_TXID, changeNumber);
- Graph_txnElementDAO.setChangeset(txn, new EOLoop(changeSet));
- ESPMessage msg = new ESPMessage(txn);
- logger.info("Sending message: {}", msg);
- producer.send(this.streamAddress, msg);
- this.currentTxnId = changeNumber;
- }
-
- public ESPMessage handle(ESPMessage request) {
- // We actually don't care about the request at this point, just assume they want the state
- try {
- readLock.lock();
- EO reply = Graph_state_responseElementDAO.create();
- reply.setValue(Graph_state_responseElementDAO.KEY_TXID, this.currentTxnId);
- EOLoop graph = Graph_state_responseElementDAO.getGraph(reply);
-
- // We do this to avoid resetting the edit tracker on our copy of the graph
- graph.addWithoutTracker(this.graph);
-
- logger.info("Replying to caller: {}", reply);
- return new ESPMessage(reply);
- } finally {
- readLock.unlock();
- }
- }
-}
diff --git a/lc-esp-test-svc/src/main/java/lc/esp/test/svc/GreeterEndpoint.java b/lc-esp-test-svc/src/main/java/lc/esp/test/svc/GreeterEndpoint.java
deleted file mode 100644
index 6dfbdd475d997da63a668049610eeae22adec3f3..0000000000000000000000000000000000000000
--- a/lc-esp-test-svc/src/main/java/lc/esp/test/svc/GreeterEndpoint.java
+++ /dev/null
@@ -1,31 +0,0 @@
-package lc.esp.test.svc;
-
-import lc.eo.EO;
-import lc.esp.sdk.ESPAddress;
-import lc.esp.sdk.ESPMessage;
-import lc.esp.sdk.ESPSession;
-import lc.esp.sdk.service.ESPService;
-import lc.esp.test.schema.v1.Greeting_requestElementDAO;
-import lc.esp.test.schema.v1.Greeting_responseElementDAO;
-
-/**
- * This service waits for a request from clients and returns a greeting that is formulated based on the contents
- * of the request.
- *
- * @author Alex Leigh
- * @since mk1
- */
-public class GreeterEndpoint extends ESPService {
- public GreeterEndpoint(ESPSession session, ESPAddress destination) throws Exception {
- super(session, destination);
- }
-
- @Override
- public ESPMessage handle(ESPMessage request) {
- EO req = request.getPayloadEO();
- String theirName = Greeting_requestElementDAO.getName(req);
- EO reply = Greeting_responseElementDAO.create();
- Greeting_responseElementDAO.setMessage(reply, "Hello, " + theirName);
- return new ESPMessage(reply);
- }
-}
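
The greeting exchange is defined entirely by the two generated DAOs. Below is a hedged sketch of both sides of the exchange using only DAO calls that appear in this diff; the actual transport (sending the request over the greeting queue and receiving the reply) is handled by the ESP SDK and is intentionally omitted here.

```java
import lc.eo.EO;
import lc.esp.test.schema.v1.Greeting_requestElementDAO;
import lc.esp.test.schema.v1.Greeting_responseElementDAO;

public class GreetingExchangeSketch {
    public static void main(String[] args) {
        // Client side: build the request payload.
        EO request = Greeting_requestElementDAO.create();
        Greeting_requestElementDAO.setName(request, "Ada");

        // Service side: this mirrors what GreeterEndpoint.handle() does with the payload.
        String theirName = Greeting_requestElementDAO.getName(request);
        EO reply = Greeting_responseElementDAO.create();
        Greeting_responseElementDAO.setMessage(reply, "Hello, " + theirName);

        // Client side: read the response payload.
        System.out.println(Greeting_responseElementDAO.getMessage(reply));
    }
}
```
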
diff --git a/lc-esp-test-svc/src/main/java/lc/esp/test/svc/TestService.java b/lc-esp-test-svc/src/main/java/lc/esp/test/svc/TestService.java
deleted file mode 100644
index cfc5e6fb0bd81a7df5a180ab97f27f1ca7a53031..0000000000000000000000000000000000000000
--- a/lc-esp-test-svc/src/main/java/lc/esp/test/svc/TestService.java
+++ /dev/null
@@ -1,55 +0,0 @@
-package lc.esp.test.svc;
-
-import lc.esp.sdk.ESPAddress;
-import lc.esp.sdk.ESPAddressClass;
-import lc.esp.sdk.ESPClient;
-import lc.esp.sdk.ESPMessageClass;
-import lc.mecha.log.MechaLogger;
-import lc.mecha.log.MechaLoggerFactory;
-import lc.mecha.util.BasicallyDangerous;
-import lc.mecha.util.UniversalJob;
-
-/**
- * This service implements example backend service functionality for exercising client implementations. It is intended
- * to be used by client developers to test and exercise their sdks and applications.
- *
- * @author Alex Leigh
- * @since mk18
- */
-public class TestService extends BasicallyDangerous {
- private static final MechaLogger logger = MechaLoggerFactory.getLogger(TestService.class);
- private final ESPClient esp = new ESPClient();
-
- public TestService() throws Exception {
- esp.start();
- }
-
- public static void main(String[] args) throws Exception {
- UniversalJob.banner(logger, "TestService mk1");
- TestService svc = new TestService();
- svc.run();
- }
-
- @Override
- public void runDangerously() throws Exception {
- esp.start();
-
- // TODO: Make these configurable
-
- ESPAddress beaconDestination = new ESPAddress("lc", "global", "test", "beacon",
- ESPAddressClass.TOPIC, ESPMessageClass.TELEMETRY);
-
- ESPAddress greetingDestination = new ESPAddress("lc", "global", "test", "greeting",
- ESPAddressClass.QUEUE, ESPMessageClass.COMMAND);
-
- ESPAddress graphStreamAddress = new ESPAddress("lc", "global", "test",
- "graph_stream", ESPAddressClass.TOPIC, ESPMessageClass.STREAM);
-
- ESPAddress graphStreamCommandAddress = new ESPAddress("lc", "global",
- "test", "graph_stream", ESPAddressClass.QUEUE, ESPMessageClass.COMMAND);
-
- new Thread(new BeaconEndpoint(esp.createSession(), beaconDestination)).start();
- new Thread(new GreeterEndpoint(esp.createSession(), greetingDestination)).start();
- new Thread(new GraphStreamEndpoint(esp.createSession(), graphStreamAddress, graphStreamCommandAddress)).start();
- }
-}
diff --git a/lc-evelyn-svc/build.gradle b/lc-evelyn-svc/build.gradle
deleted file mode 100644
index 50f06a8ab43e1217301b5f6b0d53a85ffce7e8cf..0000000000000000000000000000000000000000
--- a/lc-evelyn-svc/build.gradle
+++ /dev/null
@@ -1,25 +0,0 @@
-plugins {
- id 'java'
- id 'application'
-}
-
-group 'leighco'
-version '2.0'
-
-sourceCompatibility = JavaVersion.VERSION_17
-targetCompatibility = JavaVersion.VERSION_17
-
-repositories {
- mavenCentral()
-}
-
-dependencies {
- implementation project(':lc-esp-cli-sdk')
- implementation project(':lc-mecha-calc')
- testImplementation 'org.junit.jupiter:junit-jupiter-api:5.7.0'
- testRuntimeOnly 'org.junit.jupiter:junit-jupiter-engine:5.7.0'
-}
-
-test {
- useJUnitPlatform()
-}
\ No newline at end of file
diff --git a/lc-evelyn-svc/src/main/java/lc/evelyn/svc/EvelynCommands.java b/lc-evelyn-svc/src/main/java/lc/evelyn/svc/EvelynCommands.java
deleted file mode 100644
index cb54345e5d8effce84ac7b01a65459e2a8e012a9..0000000000000000000000000000000000000000
--- a/lc-evelyn-svc/src/main/java/lc/evelyn/svc/EvelynCommands.java
+++ /dev/null
@@ -1,43 +0,0 @@
-package lc.evelyn.svc;
-
-import lc.esp.cli.sdk.CLIService;
-import lc.esp.sdk.ESPAddress;
-import lc.esp.sdk.ESPSession;
-import lc.mecha.calc.CalcEngine;
-import lc.mecha.cli.cmd.BasicCommandProcessor;
-import lc.mecha.cli.cmd.CommandHandler;
-import lc.mecha.cli.line.BasicLineHandler;
-
-/**
- * This class implements Evelyn's CLI.
- *
- * @author Alex Leigh
- * @since 2.0
- */
-public class EvelynCommands extends CLIService {
- private CalcEngine calcEngine = new CalcEngine();
-
- public EvelynCommands(ESPSession client, ESPAddress address) {
- super(client, address);
- setupFrame();
- }
-
- private void setupFrame() {
- BasicCommandProcessor bcp = super.getBcp();
- bcp.addCommandHandler("HELLO", new CommandHandler() {
- @Override
- public void handle(BasicLineHandler lineHandler, String[] cmd) throws Exception {
- lineHandler.write("Hello!", false);
- }
- });
-
- bcp.addCommandHandler("CALC", new CommandHandler() {
- @Override
- public void handle(BasicLineHandler lineHandler, String[] cmd) throws Exception {
- // TODO: This is unsafe as unbounded scripts could be executed by a user
- String result = calcEngine.calc(cmd[1]);
- lineHandler.write(result, false);
- }
- });
- }
-}
\ No newline at end of file
diff --git a/lc-evelyn-svc/src/main/java/lc/evelyn/svc/EvelynService.java b/lc-evelyn-svc/src/main/java/lc/evelyn/svc/EvelynService.java
deleted file mode 100644
index fcfb60169de38eb55530950b4e858c001e233203..0000000000000000000000000000000000000000
--- a/lc-evelyn-svc/src/main/java/lc/evelyn/svc/EvelynService.java
+++ /dev/null
@@ -1,27 +0,0 @@
-package lc.evelyn.svc;
-
-import lc.esp.sdk.ESPAddress;
-import lc.esp.sdk.ESPClient;
-import lc.mecha.util.BasicallyDangerous;
-
-/**
- * This class implements the "brain" for Evelyn. It uses the CLI model to listen on a ESP queue for remote
- * commands. The response of the command is returned to the caller via the CLI mechanics.
- *
- * @author Alex Leigh
- * @since 2.0
- */
-public class EvelynService extends BasicallyDangerous {
- public static void main(String[] args) {
- new EvelynService().run();
- }
-
- @Override
- public void runDangerously() throws Exception {
- ESPClient client = new ESPClient();
- client.start();
- EvelynCommands cli = new EvelynCommands(client.createSession(),
- new ESPAddress("queue://svc@lc.global.loa.evelyn.cmd"));
- cli.run();
- }
-}
diff --git a/lc-example-graphql/build.gradle b/lc-example-graphql/build.gradle
index 8f8e10a7af8fb37f0915516a35285f14d9226691..905b361278920e1ae972c5541000c02bc2f2de89 100644
--- a/lc-example-graphql/build.gradle
+++ b/lc-example-graphql/build.gradle
@@ -4,7 +4,7 @@ plugins {
sourceCompatibility = JavaVersion.VERSION_17
targetCompatibility = JavaVersion.VERSION_17
group 'leigh'
-version '18.0'
+version '19.0'
repositories {
mavenCentral()
diff --git a/lc-gdn-chef/LICENSE.md b/lc-gdn-chef/LICENSE.md
deleted file mode 100644
index 45a6272868993549245a703bf2a3445014e32577..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/LICENSE.md
+++ /dev/null
@@ -1,492 +0,0 @@
-# GNU GENERAL PUBLIC LICENSE
-
-Version 3, 29 June 2007
-
-Copyright (C) 2007 [Free Software Foundation, Inc.](http://fsf.org/)
-
-Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed.
-
-## Preamble
-
-The GNU General Public License is a free, copyleft license for software and other kinds of works.
-
-The licenses for most software and other practical works are designed to take away your freedom to share and change the
-works. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change all
-versions of a program--to make sure it remains free software for all its users. We, the Free Software Foundation, use
-the GNU General Public License for most of our software; it applies also to any other work released this way by its
-authors. You can apply it to your programs, too.
-
-When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make
-sure that you have the freedom to distribute copies of free software (and charge for them if you wish), that you receive
-source code or can get it if you want it, that you can change the software or use pieces of it in new free programs, and
-that you know you can do these things.
-
-To protect your rights, we need to prevent others from denying you these rights or asking you to surrender the rights.
-Therefore, you have certain responsibilities if you distribute copies of the software, or if you modify it:
-responsibilities to respect the freedom of others.
-
-For example, if you distribute copies of such a program, whether gratis or for a fee, you must pass on to the recipients
-the same freedoms that you received. You must make sure that they, too, receive or can get the source code. And you must
-show them these terms so they know their rights.
-
-Developers that use the GNU GPL protect your rights with two steps:
-
-1. assert copyright on the software, and
-2. offer you this License giving you legal permission to copy, distribute and/or modify it.
-
-For the developers' and authors' protection, the GPL clearly explains that there is no warranty for this free software.
-For both users' and authors' sake, the GPL requires that modified versions be marked as changed, so that their problems
-will not be attributed erroneously to authors of previous versions.
-
-Some devices are designed to deny users access to install or run modified versions of the software inside them, although
-the manufacturer can do so. This is fundamentally incompatible with the aim of protecting users' freedom to change the
-software. The systematic pattern of such abuse occurs in the area of products for individuals to use, which is precisely
-where it is most unacceptable. Therefore, we have designed this version of the GPL to prohibit the practice for those
-products. If such problems arise substantially in other domains, we stand ready to extend this provision to those
-domains in future versions of the GPL, as needed to protect the freedom of users.
-
-Finally, every program is threatened constantly by software patents. States should not allow patents to restrict
-development and use of software on general-purpose computers, but in those that do, we wish to avoid the special danger
-that patents applied to a free program could make it effectively proprietary. To prevent this, the GPL assures that
-patents cannot be used to render the program non-free.
-
-The precise terms and conditions for copying, distribution and modification follow.
-
-## TERMS AND CONDITIONS
-
-### 0. Definitions.
-
-*This License* refers to version 3 of the GNU General Public License.
-
-*Copyright* also means copyright-like laws that apply to other kinds of works, such as semiconductor masks.
-
-*The Program* refers to any copyrightable work licensed under this License. Each licensee is addressed as *you*.
-*Licensees* and *recipients* may be individuals or organizations.
-
-To *modify* a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission,
-other than the making of an exact copy. The resulting work is called a *modified version* of the earlier work or a work
-*based on* the earlier work.
-
-A *covered work* means either the unmodified Program or a work based on the Program.
-
-To *propagate* a work means to do anything with it that, without permission, would make you directly or secondarily
-liable for infringement under applicable copyright law, except executing it on a computer or modifying a private copy.
-Propagation includes copying, distribution (with or without modification), making available to the public, and in some
-countries other activities as well.
-
-To *convey* a work means any kind of propagation that enables other parties to make or receive copies. Mere interaction
-with a user through a computer network, with no transfer of a copy, is not conveying.
-
-An interactive user interface displays *Appropriate Legal Notices* to the extent that it includes a convenient and
-prominently visible feature that
-
-1. displays an appropriate copyright notice, and
-2. tells the user that there is no warranty for the work (except to the extent that warranties are provided), that
- licensees may convey the work under this License, and how to view a copy of this License.
-
-If the interface presents a list of user commands or options, such as a menu, a prominent item in the list meets this
-criterion.
-
-### 1. Source Code.
-
-The *source code* for a work means the preferred form of the work for making modifications to it. *Object code* means
-any non-source form of a work.
-
-A *Standard Interface* means an interface that either is an official standard defined by a recognized standards body,
-or, in the case of interfaces specified for a particular programming language, one that is widely used among developers
-working in that language.
-
-The *System Libraries* of an executable work include anything, other than the work as a whole, that (a) is included in
-the normal form of packaging a Major Component, but which is not part of that Major Component, and (b) serves only to
-enable use of the work with that Major Component, or to implement a Standard Interface for which an implementation is
-available to the public in source code form. A *Major Component*, in this context, means a major essential component
-(kernel, window system, and so on) of the specific operating system (if any) on which the executable work runs, or a
-compiler used to produce the work, or an object code interpreter used to run it.
-
-The *Corresponding Source* for a work in object code form means all the source code needed to generate, install, and (
-for an executable work) run the object code and to modify the work, including scripts to control those activities.
-However, it does not include the work's System Libraries, or general-purpose tools or generally available free programs
-which are used unmodified in performing those activities but which are not part of the work. For example, Corresponding
-Source includes interface definition files associated with source files for the work, and the source code for shared
-libraries and dynamically linked subprograms that the work is specifically designed to require, such as by intimate data
-communication or control flow between those subprograms and other parts of the work.
-
-The Corresponding Source need not include anything that users can regenerate automatically from other parts of the
-Corresponding Source.
-
-The Corresponding Source for a work in source code form is that same work.
-
-### 2. Basic Permissions.
-
-All rights granted under this License are granted for the term of copyright on the Program, and are irrevocable provided
-the stated conditions are met. This License explicitly affirms your unlimited permission to run the unmodified Program.
-The output from running a covered work is covered by this License only if the output, given its content, constitutes a
-covered work. This License acknowledges your rights of fair use or other equivalent, as provided by copyright law.
-
-You may make, run and propagate covered works that you do not convey, without conditions so long as your license
-otherwise remains in force. You may convey covered works to others for the sole purpose of having them make
-modifications exclusively for you, or provide you with facilities for running those works, provided that you comply with
-the terms of this License in conveying all material for which you do not control copyright. Those thus making or running
-the covered works for you must do so exclusively on your behalf, under your direction and control, on terms that
-prohibit them from making any copies of your copyrighted material outside their relationship with you.
-
-Conveying under any other circumstances is permitted solely under the conditions stated below. Sublicensing is not
-allowed; section 10 makes it unnecessary.
-
-### 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
-
-No covered work shall be deemed part of an effective technological measure under any applicable law fulfilling
-obligations under article 11 of the WIPO copyright treaty adopted on 20 December 1996, or similar laws prohibiting or
-restricting circumvention of such measures.
-
-When you convey a covered work, you waive any legal power to forbid circumvention of technological measures to the
-extent such circumvention is effected by exercising rights under this License with respect to the covered work, and you
-disclaim any intention to limit operation or modification of the work as a means of enforcing, against the work's users,
-your or third parties' legal rights to forbid circumvention of technological measures.
-
-### 4. Conveying Verbatim Copies.
-
-You may convey verbatim copies of the Program's source code as you receive it, in any medium, provided that you
-conspicuously and appropriately publish on each copy an appropriate copyright notice; keep intact all notices stating
-that this License and any non-permissive terms added in accord with section 7 apply to the code; keep intact all notices
-of the absence of any warranty; and give all recipients a copy of this License along with the Program.
-
-You may charge any price or no price for each copy that you convey, and you may offer support or warranty protection for
-a fee.
-
-### 5. Conveying Modified Source Versions.
-
-You may convey a work based on the Program, or the modifications to produce it from the Program, in the form of source
-code under the terms of section 4, provided that you also meet all of these conditions:
-
-- a) The work must carry prominent notices stating that you modified it, and giving a relevant date.
-- b) The work must carry prominent notices stating that it is released under this License and any conditions added under
- section 7. This requirement modifies the requirement in section 4 to *keep intact all notices*.
-- c) You must license the entire work, as a whole, under this License to anyone who comes into possession of a copy.
- This License will therefore apply, along with any applicable section 7 additional terms, to the whole of the work, and
- all its parts, regardless of how they are packaged. This License gives no permission to license the work in any other
- way, but it does not invalidate such permission if you have separately received it.
-- d) If the work has interactive user interfaces, each must display Appropriate Legal Notices; however, if the Program
- has interactive interfaces that do not display Appropriate Legal Notices, your work need not make them do so.
-
-A compilation of a covered work with other separate and independent works, which are not by their nature extensions of
-the covered work, and which are not combined with it such as to form a larger program, in or on a volume of a storage or
-distribution medium, is called an *aggregate* if the compilation and its resulting copyright are not used to limit the
-access or legal rights of the compilation's users beyond what the individual works permit. Inclusion of a covered work
-in an aggregate does not cause this License to apply to the other parts of the aggregate.
-
-### 6. Conveying Non-Source Forms.
-
-You may convey a covered work in object code form under the terms of sections 4 and 5, provided that you also convey the
-machine-readable Corresponding Source under the terms of this License, in one of these ways:
-
-- a) Convey the object code in, or embodied in, a physical product (including a physical distribution medium),
- accompanied by the Corresponding Source fixed on a durable physical medium customarily used for software interchange.
-- b) Convey the object code in, or embodied in, a physical product (including a physical distribution medium),
- accompanied by a written offer, valid for at least three years and valid for as long as you offer spare parts or
- customer support for that product model, to give anyone who possesses the object code either
-
-1. a copy of the Corresponding Source for all the software in the product that is covered by this License, on a durable
- physical medium customarily used for software interchange, for a price no more than your reasonable cost of
- physically performing this conveying of source, or
-2. access to copy the Corresponding Source from a network server at no charge.
-
-- c) Convey individual copies of the object code with a copy of the written offer to provide the Corresponding Source.
- This alternative is allowed only occasionally and noncommercially, and only if you received the object code with such
- an offer, in accord with subsection 6b.
-- d) Convey the object code by offering access from a designated place
- (gratis or for a charge), and offer equivalent access to the Corresponding Source in the same way through the same
- place at no further charge. You need not require recipients to copy the Corresponding Source along with the object
- code. If the place to copy the object code is a network server, the Corresponding Source may be on a different server
- (operated by you or a third party) that supports equivalent copying facilities, provided you maintain clear directions
- next to the object code saying where to find the Corresponding Source. Regardless of what server hosts the
- Corresponding Source, you remain obligated to ensure that it is available for as long as needed to satisfy these
- requirements.
-- e) Convey the object code using peer-to-peer transmission, provided you inform other peers where the object code and
- Corresponding Source of the work are being offered to the general public at no charge under subsection 6d.
-
-A separable portion of the object code, whose source code is excluded from the Corresponding Source as a System Library,
-need not be included in conveying the object code work.
-
-A *User Product* is either
-
-1. a *consumer product*, which means any tangible personal property which is normally used for personal, family, or
- household purposes, or
-2. anything designed or sold for incorporation into a dwelling.
-
-In determining whether a product is a consumer product, doubtful cases shall be resolved in favor of coverage. For a
-particular product received by a particular user, *normally used* refers to a typical or common use of that class of
-product, regardless of the status of the particular user or of the way in which the particular user actually uses, or
-expects or is expected to use, the product. A product is a consumer product regardless of whether the product has
-substantial commercial, industrial or non-consumer uses, unless such uses represent the only significant mode of use of
-the product.
-
-*Installation Information* for a User Product means any methods, procedures, authorization keys, or other information
-required to install and execute modified versions of a covered work in that User Product from a modified version of its
-Corresponding Source. The information must suffice to ensure that the continued functioning of the modified object code
-is in no case prevented or interfered with solely because modification has been made.
-
-If you convey an object code work under this section in, or with, or specifically for use in, a User Product, and the
-conveying occurs as part of a transaction in which the right of possession and use of the User Product is transferred to
-the recipient in perpetuity or for a fixed term (regardless of how the transaction is characterized), the Corresponding
-Source conveyed under this section must be accompanied by the Installation Information. But this requirement does not
-apply if neither you nor any third party retains the ability to install modified object code on the User Product (for
-example, the work has been installed in ROM).
-
-The requirement to provide Installation Information does not include a requirement to continue to provide support
-service, warranty, or updates for a work that has been modified or installed by the recipient, or for the User Product
-in which it has been modified or installed. Access to a network may be denied when the modification itself materially
-and adversely affects the operation of the network or violates the rules and protocols for communication across the
-network.
-
-Corresponding Source conveyed, and Installation Information provided, in accord with this section must be in a format
-that is publicly documented (and with an implementation available to the public in source code form), and must require
-no special password or key for unpacking, reading or copying.
-
-### 7. Additional Terms.
-
-*Additional permissions* are terms that supplement the terms of this License by making exceptions from one or more of
-its conditions. Additional permissions that are applicable to the entire Program shall be treated as though they were
-included in this License, to the extent that they are valid under applicable law. If additional permissions apply only
-to part of the Program, that part may be used separately under those permissions, but the entire Program remains
-governed by this License without regard to the additional permissions.
-
-When you convey a copy of a covered work, you may at your option remove any additional permissions from that copy, or
-from any part of it. (Additional permissions may be written to require their own removal in certain cases when you
-modify the work.) You may place additional permissions on material, added by you to a covered work, for which you have
-or can give appropriate copyright permission.
-
-Notwithstanding any other provision of this License, for material you add to a covered work, you may (if authorized by
-the copyright holders of that material)
-supplement the terms of this License with terms:
-
-- a) Disclaiming warranty or limiting liability differently from the terms of sections 15 and 16 of this License; or
-- b) Requiring preservation of specified reasonable legal notices or author attributions in that material or in the
- Appropriate Legal Notices displayed by works containing it; or
-- c) Prohibiting misrepresentation of the origin of that material, or requiring that modified versions of such material
- be marked in reasonable ways as different from the original version; or
-- d) Limiting the use for publicity purposes of names of licensors or authors of the material; or
-- e) Declining to grant rights under trademark law for use of some trade names, trademarks, or service marks; or
-- f) Requiring indemnification of licensors and authors of that material by anyone who conveys the material (or modified
- versions of it) with contractual assumptions of liability to the recipient, for any liability that these contractual
- assumptions directly impose on those licensors and authors.
-
-All other non-permissive additional terms are considered *further restrictions*
-within the meaning of section 10. If the Program as you received it, or any part of it, contains a notice stating that
-it is governed by this License along with a term that is a further restriction, you may remove that term. If a license
-document contains a further restriction but permits relicensing or conveying under this License, you may add to a
-covered work material governed by the terms of that license document, provided that the further restriction does not
-survive such relicensing or conveying.
-
-If you add terms to a covered work in accord with this section, you must place, in the relevant source files, a
-statement of the additional terms that apply to those files, or a notice indicating where to find the applicable terms.
-
-Additional terms, permissive or non-permissive, may be stated in the form of a separately written license, or stated as
-exceptions; the above requirements apply either way.
-
-### 8. Termination.
-
-You may not propagate or modify a covered work except as expressly provided under this License. Any attempt otherwise to
-propagate or modify it is void, and will automatically terminate your rights under this License (including any patent
-licenses granted under the third paragraph of section 11).
-
-However, if you cease all violation of this License, then your license from a particular copyright holder is reinstated
-
-- a) provisionally, unless and until the copyright holder explicitly and finally terminates your license, and
-- b) permanently, if the copyright holder fails to notify you of the violation by some reasonable means prior to 60 days
- after the cessation.
-
-Moreover, your license from a particular copyright holder is reinstated permanently if the copyright holder notifies you
-of the violation by some reasonable means, this is the first time you have received notice of violation of this
-License (for any work) from that copyright holder, and you cure the violation prior to 30 days after your receipt of the
-notice.
-
-Termination of your rights under this section does not terminate the licenses of parties who have received copies or
-rights from you under this License. If your rights have been terminated and not permanently reinstated, you do not
-qualify to receive new licenses for the same material under section 10.
-
-### 9. Acceptance Not Required for Having Copies.
-
-You are not required to accept this License in order to receive or run a copy of the Program. Ancillary propagation of a
-covered work occurring solely as a consequence of using peer-to-peer transmission to receive a copy likewise does not
-require acceptance. However, nothing other than this License grants you permission to propagate or modify any covered
-work. These actions infringe copyright if you do not accept this License. Therefore, by modifying or propagating a
-covered work, you indicate your acceptance of this License to do so.
-
-### 10. Automatic Licensing of Downstream Recipients.
-
-Each time you convey a covered work, the recipient automatically receives a license from the original licensors, to run,
-modify and propagate that work, subject to this License. You are not responsible for enforcing compliance by third
-parties with this License.
-
-An *entity transaction* is a transaction transferring control of an organization, or substantially all assets of one, or
-subdividing an organization, or merging organizations. If propagation of a covered work results from an entity
-transaction, each party to that transaction who receives a copy of the work also receives whatever licenses to the work
-the party's predecessor in interest had or could give under the previous paragraph, plus a right to possession of the
-Corresponding Source of the work from the predecessor in interest, if the predecessor has it or can get it with
-reasonable efforts.
-
-You may not impose any further restrictions on the exercise of the rights granted or affirmed under this License. For
-example, you may not impose a license fee, royalty, or other charge for exercise of rights granted under this License,
-and you may not initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging that any patent
-claim is infringed by making, using, selling, offering for sale, or importing the Program or any portion of it.
-
-### 11. Patents.
-
-A *contributor* is a copyright holder who authorizes use under this License of the Program or a work on which the
-Program is based. The work thus licensed is called the contributor's *contributor version*.
-
-A contributor's *essential patent claims* are all patent claims owned or controlled by the contributor, whether already
-acquired or hereafter acquired, that would be infringed by some manner, permitted by this License, of making, using, or
-selling its contributor version, but do not include claims that would be infringed only as a consequence of further
-modification of the contributor version. For purposes of this definition, *control* includes the right to grant patent
-sublicenses in a manner consistent with the requirements of this License.
-
-Each contributor grants you a non-exclusive, worldwide, royalty-free patent license under the contributor's essential
-patent claims, to make, use, sell, offer for sale, import and otherwise run, modify and propagate the contents of its
-contributor version.
-
-In the following three paragraphs, a *patent license* is any express agreement or commitment, however denominated, not
-to enforce a patent (such as an express permission to practice a patent or covenant not to sue for patent infringement).
-To *grant* such a patent license to a party means to make such an agreement or commitment not to enforce a patent
-against the party.
-
-If you convey a covered work, knowingly relying on a patent license, and the Corresponding Source of the work is not
-available for anyone to copy, free of charge and under the terms of this License, through a publicly available network
-server or other readily accessible means, then you must either
-
-1. cause the Corresponding Source to be so available, or
-2. arrange to deprive yourself of the benefit of the patent license for this particular work, or
-3. arrange, in a manner consistent with the requirements of this License, to extend the patent license to downstream
- recipients.
-
-*Knowingly relying* means you have actual knowledge that, but for the patent license, your conveying the covered work in
-a country, or your recipient's use of the covered work in a country, would infringe one or more identifiable patents in
-that country that you have reason to believe are valid.
-
-If, pursuant to or in connection with a single transaction or arrangement, you convey, or propagate by procuring
-conveyance of, a covered work, and grant a patent license to some of the parties receiving the covered work authorizing
-them to use, propagate, modify or convey a specific copy of the covered work, then the patent license you grant is
-automatically extended to all recipients of the covered work and works based on it.
-
-A patent license is *discriminatory* if it does not include within the scope of its coverage, prohibits the exercise of,
-or is conditioned on the non-exercise of one or more of the rights that are specifically granted under this License. You
-may not convey a covered work if you are a party to an arrangement with a third party that is in the business of
-distributing software, under which you make payment to the third party based on the extent of your activity of conveying
-the work, and under which the third party grants, to any of the parties who would receive the covered work from you, a
-discriminatory patent license
-
-- a) in connection with copies of the covered work conveyed by you (or copies made from those copies), or
-- b) primarily for and in connection with specific products or compilations that contain the covered work, unless you
- entered into that arrangement, or that patent license was granted, prior to 28 March 2007.
-
-Nothing in this License shall be construed as excluding or limiting any implied license or other defenses to
-infringement that may otherwise be available to you under applicable patent law.
-
-### 12. No Surrender of Others' Freedom.
-
-If conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this
-License, they do not excuse you from the conditions of this License. If you cannot convey a covered work so as to
-satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence
-you may not convey it at all. For example, if you agree to terms that obligate you to collect a royalty for further
-conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License
-would be to refrain entirely from conveying the Program.
-
-### 13. Use with the GNU Affero General Public License.
-
-Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work
-licensed under version 3 of the GNU Affero General Public License into a single combined work, and to convey the
-resulting work. The terms of this License will continue to apply to the part which is the covered work, but the special
-requirements of the GNU Affero General Public License, section 13, concerning interaction through a network will apply
-to the combination as such.
-
-### 14. Revised Versions of this License.
-
-The Free Software Foundation may publish revised and/or new versions of the GNU General Public License from time to
-time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new
-problems or concerns.
-
-Each version is given a distinguishing version number. If the Program specifies that a certain numbered version of the
-GNU General Public License *or any later version* applies to it, you have the option of following the terms and
-conditions either of that numbered version or of any later version published by the Free Software Foundation. If the
-Program does not specify a version number of the GNU General Public License, you may choose any version ever published
-by the Free Software Foundation.
-
-If the Program specifies that a proxy can decide which future versions of the GNU General Public License can be used,
-that proxy's public statement of acceptance of a version permanently authorizes you to choose that version for the
-Program.
-
-Later license versions may give you additional or different permissions. However, no additional obligations are imposed
-on any author or copyright holder as a result of your choosing to follow a later version.
-
-### 15. Disclaimer of Warranty.
-
-THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING
-THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM *AS IS* WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR
-IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
-THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU
-ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
-
-### 16. Limitation of Liability.
-
-IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO
-MODIFIES AND/OR CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL,
-INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO
-LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM
-TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
-DAMAGES.
-
-### 17. Interpretation of Sections 15 and 16.
-
-If the disclaimer of warranty and limitation of liability provided above cannot be given local legal effect according to
-their terms, reviewing courts shall apply local law that most closely approximates an absolute waiver of all civil
-liability in connection with the Program, unless a warranty or assumption of liability accompanies a copy of the Program
-in return for a fee.
-
-## END OF TERMS AND CONDITIONS
-
-### How to Apply These Terms to Your New Programs
-
-If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve
-this is to make it free software which everyone can redistribute and change under these terms.
-
-To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to
-most effectively state the exclusion of warranty; and each file should have at least the *copyright* line and a pointer
-to where the full notice is found.
-
-
- Copyright (C) <year> <name of author>
-
- This program is free software: you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation, either version 3 of the License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License
- along with this program. If not, see <http://www.gnu.org/licenses/>.
-
-Also add information on how to contact you by electronic and paper mail.
-
-If the program does terminal interaction, make it output a short notice like this when it starts in an interactive mode:
-
- <program> Copyright (C) <year> <name of author>
- This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
- This is free software, and you are welcome to redistribute it
- under certain conditions; type `show c' for details.
-
-The hypothetical commands `show w` and `show c` should show the appropriate parts of the General Public License. Of
-course, your program's commands might be different; for a GUI interface, you would use an *about box*.
-
-You should also get your employer (if you work as a programmer) or school, if any, to sign a *copyright disclaimer* for
-the program, if necessary. For more information on this, and how to apply and follow the GNU GPL, see
-[http://www.gnu.org/licenses/](http://www.gnu.org/licenses/).
-
-The GNU General Public License does not permit incorporating your program into proprietary programs. If your program is
-a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If
-this is what you want to do, use the GNU Lesser General Public License instead of this License. But first, please read
-[http://www.gnu.org/philosophy/why-not-lgpl.html](http://www.gnu.org/philosophy/why-not-lgpl.html).
diff --git a/lc-gdn-chef/cookbooks/acme/.rubocop.yml b/lc-gdn-chef/cookbooks/acme/.rubocop.yml
deleted file mode 100644
index 65165307e719b08e6fe7e67198ba1458cc07c446..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/acme/.rubocop.yml
+++ /dev/null
@@ -1,6 +0,0 @@
-AllCops:
- TargetChefVersion: 17.latest
-Chef/Modernize/FoodcriticComments:
- Enabled: true
-Chef/Style/CopyrightCommentFormat:
- Enabled: true
diff --git a/lc-gdn-chef/cookbooks/acme/Berksfile b/lc-gdn-chef/cookbooks/acme/Berksfile
deleted file mode 100644
index c6b4e7a5b48a773a32efd8654b709bf6cdee1efd..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/acme/Berksfile
+++ /dev/null
@@ -1,8 +0,0 @@
-source 'https://supermarket.chef.io'
-
-group :integration do
- cookbook 'acme_client', path: 'test/fixtures/cookbooks/acme_client'
- cookbook 'acme_server', path: 'test/fixtures/cookbooks/acme_server'
-end
-
-metadata
diff --git a/lc-gdn-chef/cookbooks/acme/Berksfile.lock b/lc-gdn-chef/cookbooks/acme/Berksfile.lock
deleted file mode 100644
index de9191787691bd438c4de727c595744c5b6748d8..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/acme/Berksfile.lock
+++ /dev/null
@@ -1,22 +0,0 @@
-DEPENDENCIES
- acme
- path: .
- metadata: true
- acme_client
- path: test/fixtures/cookbooks/acme_client
- acme_server
- path: test/fixtures/cookbooks/acme_server
-
-GRAPH
- acme (4.1.4)
- acme_client (0.1.0)
- acme (>= 0.0.0)
- nginx (>= 0.0.0)
- acme_server (0.1.0)
- golang (>= 5.3.0)
- ark (6.0.3)
- seven_zip (>= 3.1)
- golang (5.3.1)
- ark (>= 6.0)
- nginx (12.0.10)
- seven_zip (4.2.2)
diff --git a/lc-gdn-chef/cookbooks/acme/LICENSE b/lc-gdn-chef/cookbooks/acme/LICENSE
deleted file mode 100644
index 3c7287fd8ef8c09072cea9e9a6c53f8afc3068c9..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/acme/LICENSE
+++ /dev/null
@@ -1,201 +0,0 @@
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
-TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
-2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
-3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
-4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
-5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
-6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
-7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
-8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
-9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
-END OF TERMS AND CONDITIONS
-
-APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
-Copyright 2015-2018 Schuberg Philis
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
diff --git a/lc-gdn-chef/cookbooks/acme/README.md b/lc-gdn-chef/cookbooks/acme/README.md
deleted file mode 100644
index efb8ce1d2f97164f42a5e607c54599e69a2670cc..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/acme/README.md
+++ /dev/null
@@ -1,213 +0,0 @@
-ACME cookbook
-=============
-
-[Build Status](https://travis-ci.org/schubergphilis/chef-acme)
-[Cookbook Version](https://supermarket.chef.io/cookbooks/acme)
-
-Automatically get/renew free and trusted certificates from Let's Encrypt (letsencrypt.org). ACME is
-the [Automated Certificate Management Environment protocol][1] used by [Let's Encrypt][2].
-
-```
-Starting with v4.0.0 of the acme cookbook, the acme_ssl_certificate provider has been removed! The TLS-SNI-01 validation method used by this provider has been disabled by Let's Encrypt due to security concerns. Please switch to the acme_certificate provider in this cookbook to request and renew your certificate using the supported HTTP-01 validation method.
-```
-
-Attributes
-----------
-
-| Attribute | Description | Default |
-| -------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | --------------------------------------: |
-| contact | Contact information, default empty. Set to `mailto:your@email.com` | [] |
-| dir | ACME server endpoint. Set to `https://acme-staging-v02.api.letsencrypt.org/directory` if you want to use the Let's Encrypt staging environment and corresponding certificates. | `https://acme-v02.api.letsencrypt.org/directory` |
-| renew | Days before the certificate expires at which the certificate will be renewed | 30 |
-| source_ips | IP addresses used by Let's Encrypt to verify the certificates; these may change over time. This attribute is for firewall purposes: allow these IPs for HTTP (tcp/80). | ['66.133.109.36', '64.78.149.164'] |
-| private_key | Private key content of registered account. Private keys identify the ACME client with the endpoint and are not transferable between staging and production endpoints. | nil |
-| key_size | Default private key size, used when the resource property is not set. Must be one of: 2048, 3072, 4096. | 2048 |
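-
-For example, a wrapper recipe could override these attributes before including the default recipe. The snippet below is a
-minimal sketch; the wrapper cookbook name is hypothetical, and the attribute names are taken from the table above and
-from `attributes/default.rb`:
-
-```ruby
-# Hypothetical wrapper recipe: my_acme_wrapper/recipes/default.rb
-# Override the node attributes documented above, then include the default
-# recipe so the acme-client gem gets installed.
-node.override['acme']['contact']  = ['mailto:admin@example.com']
-node.override['acme']['dir']      = 'https://acme-staging-v02.api.letsencrypt.org/directory' # staging while testing
-node.override['acme']['renew']    = 21   # renew 21 days before expiry
-node.override['acme']['key_size'] = 4096 # must be 2048, 3072, or 4096
-
-include_recipe 'acme'
-```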
-
-Recipes
--------
-
-### default
-
-Installs the required acme-client rubygem.
-
-Usage
------
-Use the `acme_certificate` resource to request a certificate with the http-01 challenge. The webserver for the domain
-for which you are requesting a certificate must be running on the local server. This resource only supports the http
-validation method. To use the tls-sni-01 challenge, please see the resource below. Provide the path to your `wwwroot`
-for the specified domain.
-
-```ruby
-acme_certificate 'test.example.com' do
- crt '/etc/ssl/test.example.com.crt'
- key '/etc/ssl/test.example.com.key'
- wwwroot '/var/www'
-end
-```
-
-If your webserver needs a certificate to already be in place when a new server is installed, you will have a bootstrap
-problem: the web server cannot start without a certificate, but the certificate cannot be requested without a running
-web server. To overcome this, a temporary self-signed certificate can be generated with the `acme_selfsigned` resource,
-allowing the web server to start.
-
-```ruby
-acme_selfsigned 'test.example.com' do
- crt '/etc/ssl/test.example.com.crt'
- chain '/etc/ssl/test.example.com-chain.crt'
- key '/etc/ssl/test.example.com.key'
-end
-```
-
-A working example can be found in the included `acme_client` test cookbook.
-
-Providers
----------
-
-### certificate
-
-| Property | Type | Default | Description |
-| --- | --- | --- | --- |
-| `cn` | string | _name_ | The common name for the certificate |
-| `alt_names` | array | [] | Alternative names (SANs) to include in the certificate |
-| `crt` | string | nil | File path to place the certificate |
-| `key` | string | nil | File path to place the private key |
-| `key_size` | integer | 2048 | Private key size. Must be one out of: 2048, 3072, 4096 |
-| `owner` | string | root | Owner of the created files |
-| `group` | string | root | Group of the created files |
-| `wwwroot` | string | /var/www | Path to the wwwroot of the domain |
-| `ignore_failure` | boolean | false | Whether to continue chef run if issuance fails |
-| `retries` | integer | 0 | Number of times to catch exceptions and retry |
-| `retry_delay` | integer | 2 | Number of seconds to wait between retries |
-| `endpoint` | string | nil | The Let's Encrypt endpoint to use |
-| `contact` | array | [] | The contact to use |
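-
-The usage examples in this README do not exercise the retry or key-size properties; the following is a minimal,
-hypothetical sketch combining them (the domain and file paths are placeholders):
-
-```ruby
-# A rough sketch only: request a 4096-bit certificate with a SAN, owned by the
-# web server user, and retry issuance a couple of times before giving up.
-acme_certificate 'test.example.com' do
-  crt         '/etc/ssl/test.example.com.crt'
-  key         '/etc/ssl/test.example.com.key'
-  alt_names   ['www.test.example.com']
-  key_size    4096        # one of 2048, 3072, 4096
-  wwwroot     '/var/www'
-  owner       'www-data'
-  group       'www-data'
-  retries     2           # catch exceptions and retry issuance twice
-  retry_delay 30          # wait 30 seconds between retries
-end
-```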
-
-### selfsigned
-
-| Property | Type | Default | Description |
-| --- | --- | --- | --- |
-| `cn` | string | _name_ | The common name for the certificate |
-| `crt` | string | nil | File path to place the certificate |
-| `key` | string | nil | File path to place the private key |
-| `key_size` | integer | 2048 | Private key size. Must be one out of: 2048, 3072, 4096 |
-| `chain` | string | nil | File path to place the certificate chain |
-| `owner` | string | root | Owner of the created files |
-| `group` | string | root | Group of the created files |
-
-Example
--------
-To generate a certificate for an apache2 website you can use code like this:
-
-```ruby
-# Include the recipe to install the gems
-include_recipe 'acme'
-
-# Set up contact information. Note the mailto: notation
-node.override['acme']['contact'] = ['mailto:me@example.com']
-# Real certificates please...
-node.override['acme']['dir'] = 'https://acme-v02.api.letsencrypt.org/directory'
-
-site = "example.com"
-sans = ["www.#{site}"]
-
-# Generate a self-signed if we don't have a cert to prevent bootstrap problems
-acme_selfsigned "#{site}" do
- crt "/etc/httpd/ssl/#{site}.crt"
- key "/etc/httpd/ssl/#{site}.key"
- chain "/etc/httpd/ssl/#{site}.pem"
- owner "apache"
- group "apache"
- notifies :restart, "service[apache2]", :immediate
-end
-
-# Set up your web server here...
-
-# Get and auto-renew the certificate from Let's Encrypt
-acme_certificate "#{site}" do
- crt "/etc/httpd/ssl/#{site}.crt"
- key "/etc/httpd/ssl/#{site}.key"
- wwwroot "/var/www/#{site}/htdocs/"
- notifies :restart, "service[apache2]"
- alt_names sans
-end
-```
-
-DNS verification
-----------------
-
-Let's Encrypt supports DNS validation. Depending on your setup there may be different ways to deploy an ACME challenge to
-your infrastructure. If you want to use DNS validation, you have to provide two block arguments to
-the `acme_certificate` resource.
-
-Implement two methods in a library in your cookbook, each returning a `Proc` object. The following example uses an HTTP
-API to provide challenges to the DNS infrastructure.
-
-```ruby
-# my_cookbook/libraries/acme_dns.rb
-
-class Chef
- class Recipe
- def install_dns_challenge(apitoken)
- Proc.new do |authorization, new_resource|
- # use DNS authorization
- authz = authorization.dns
- fqdn = authorization.identifier['value']
- r = Net::HTTP.post(URI("https://my_awesome_dns_api/#{fqdn}"), authz.record_content, {'Authorization' => "Token #{apitoken}"})
- if r.code != '200'
- fail "DNS API does not want to install Challenge for #{fqdn}"
- else
- # do some validation that the challenge has propagated to the infrastructure
- end
- # it is important that the authz and fqdn are passed back, so they can be passed to the remove_dns_challenge method
- [authz, fqdn]
- end
- end
- def remove_dns_challenge(apitoken)
- Proc.new do |authz, fqdn|
- uri = URI("https://my_awesome_dns_api/#{fqdn}")
- Net::HTTP.start(uri.hostname, uri.port, use_ssl: uri.scheme=='https') do |http|
- http.delete(uri, {'Authorization' => "Token #{apitoken}"})
- end
- end
- end
- end
-end
-```
-
-Use it in your recipe the following way:
-
-```ruby
-apitoken = chef_vault_item(vault, item)['dns_api_token']
-acme_certificate node['fqdn'] do
- key '/path/to/key'
- crt '/path/to/crt'
- install_authz_block install_dns_challenge(apitoken)
- remove_authz_block remove_dns_challenge(apitoken)
-end
-```
-
-Testing
--------
-The Test Kitchen setup includes a `pebble` server for the integration tests, so testing can run locally without
-interacting with the online APIs.
-
-Contributing
-------------
-
-1. Fork the repository on Github
-2. Create a named feature branch (like `add_component_x`)
-3. Write your change
-4. Write tests for your change (if applicable)
-5. Run the tests, ensuring they all pass
-6. Submit a Pull Request using Github
-
-License and Authors
--------------------
-Authors: Thijs Houtenbos
-
-Credits
--------
-Let’s Encrypt is a trademark of the Internet Security Research Group. All rights reserved.
-
-[1]: https://ietf-wg-acme.github.io/acme/
-
-[2]: https://letsencrypt.org/
diff --git a/lc-gdn-chef/cookbooks/acme/attributes/default.rb b/lc-gdn-chef/cookbooks/acme/attributes/default.rb
deleted file mode 100644
index 64a7d1305fb91842e6cc230416e51c027153434d..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/acme/attributes/default.rb
+++ /dev/null
@@ -1,29 +0,0 @@
-#
-# Author:: Thijs Houtenbos
-# Cookbook:: acme
-# Attribute:: default
-#
-# Copyright:: 2015-2021, Schuberg Philis
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-default['acme']['contact'] = []
-default['acme']['dir'] = 'https://acme-v02.api.letsencrypt.org/directory'
-default['acme']['renew'] = 30
-default['acme']['source_ips'] = %w(66.133.109.36 64.78.149.164)
-
-default['acme']['private_key'] = nil
-default['acme']['private_key_file'] = '/etc/acme/account_private_key.pem'
-default['acme']['gem_version'] = '2.0.9'
-default['acme']['key_size'] = 2048
diff --git a/lc-gdn-chef/cookbooks/acme/chefignore b/lc-gdn-chef/cookbooks/acme/chefignore
deleted file mode 100644
index 138a808b67633b24ad88bf2d33fafca5f30e8e16..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/acme/chefignore
+++ /dev/null
@@ -1,94 +0,0 @@
-# Put files/directories that should be ignored in this file when uploading
-# or sharing to the community site.
-# Lines that start with '# ' are comments.
-
-# OS generated files #
-######################
-.DS_Store
-Icon?
-nohup.out
-ehthumbs.db
-Thumbs.db
-
-# SASS #
-########
-.sass-cache
-
-# EDITORS #
-###########
-\#*
-.#*
-*~
-*.sw[a-z]
-*.bak
-REVISION
-TAGS*
-tmtags
-*_flymake.*
-*_flymake
-*.tmproj
-.project
-.settings
-mkmf.log
-
-## COMPILED ##
-##############
-a.out
-*.o
-*.pyc
-*.so
-*.com
-*.class
-*.dll
-*.exe
-*/rdoc/
-
-# Testing #
-###########
-.watchr
-.rspec
-spec/*
-spec/fixtures/*
-test/*
-features/*
-Guardfile
-Procfile
-
-# SCM #
-#######
-.git
-*/.git
-.gitignore
-.gitmodules
-.gitconfig
-.gitattributes
-.svn
-*/.bzr/*
-*/.hg/*
-*/.svn/*
-
-# Berkshelf #
-#############
-cookbooks/*
-tmp
-
-# Cookbooks #
-#############
-CONTRIBUTING
-CHANGELOG*
-
-# Strainer #
-############
-Colanderfile
-Strainerfile
-.colander
-.strainer
-
-# Vagrant #
-###########
-.vagrant
-Vagrantfile
-
-# Travis #
-##########
-.travis.yml
diff --git a/lc-gdn-chef/cookbooks/acme/kitchen.digitalocean.yml b/lc-gdn-chef/cookbooks/acme/kitchen.digitalocean.yml
deleted file mode 100644
index 1a8d08df266fb5a1029c79a79958184283c4636e..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/acme/kitchen.digitalocean.yml
+++ /dev/null
@@ -1,8 +0,0 @@
----
-driver:
- name: digitalocean
- private_networking: false
- size: 2gb
-
-transport:
- name: rsync
diff --git a/lc-gdn-chef/cookbooks/acme/kitchen.dokken.yml b/lc-gdn-chef/cookbooks/acme/kitchen.dokken.yml
deleted file mode 100644
index e6c733e13440793279b1f717af65b6a61e3f7f5d..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/acme/kitchen.dokken.yml
+++ /dev/null
@@ -1,56 +0,0 @@
----
-driver:
- name: dokken
- chef_version: latest
- chef_license: accept-no-persist
- privileged: true # because Docker and systemd/Upstart
- volumes:
- - /sys/fs/cgroup:/sys/fs/cgroup # because of systemd, rabbitmq
-
-transport:
- name: dokken
-
-provisioner:
- name: dokken
-
-verifier:
- name: inspec
-
-platforms:
- - name: debian-9
- driver:
- image: dokken/debian-9
- pid_one_command: /bin/systemd
- intermediate_instructions:
- - RUN /usr/bin/apt-get update
-
- - name: debian-10
- driver:
- image: dokken/debian-10
- pid_one_command: /bin/systemd
- intermediate_instructions:
- - RUN /usr/bin/apt-get update
-
- - name: centos-7
- driver:
- image: dokken/centos-7
- pid_one_command: /usr/lib/systemd/systemd
-
- - name: centos-8
- driver:
- image: dokken/centos-8
- pid_one_command: /usr/lib/systemd/systemd
-
- - name: ubuntu-18.04
- driver:
- image: dokken/ubuntu-18.04
- pid_one_command: /bin/systemd
- intermediate_instructions:
- - RUN /usr/bin/apt-get update
-
- - name: ubuntu-20.04
- driver:
- image: dokken/ubuntu-20.04
- pid_one_command: /bin/systemd
- intermediate_instructions:
- - RUN /usr/bin/apt-get update
diff --git a/lc-gdn-chef/cookbooks/acme/kitchen.yml b/lc-gdn-chef/cookbooks/acme/kitchen.yml
deleted file mode 100644
index 92e9599f97a867f0e680a86d1468e7c3e42abf49..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/acme/kitchen.yml
+++ /dev/null
@@ -1,34 +0,0 @@
----
-driver:
- name: vagrant
- customize:
- memory: 1024
-
-verifier:
- name: inspec
-
-provisioner:
- name: chef_zero
- product_name: chef
- chef_license: accept-no-persist
- deprecations_as_errors: true
-
-platforms:
- - name: centos-7
- - name: centos-8
- - name: centos-stream-8
- - name: debian-9
- - name: debian-10
- - name: ubuntu-18.04
- - name: ubuntu-20.04
-
-suites:
- - name: http
- run_list:
- - recipe[acme_server]
- - recipe[acme_client::http]
- attributes:
- acme:
- dir: https://127.0.0.1:14000/dir
- contact:
- - mailto:admin@example.com
diff --git a/lc-gdn-chef/cookbooks/acme/libraries/acme.rb b/lc-gdn-chef/cookbooks/acme/libraries/acme.rb
deleted file mode 100644
index cb5262a52612ba5fe7d759ff8c7dc50a45244aef..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/acme/libraries/acme.rb
+++ /dev/null
@@ -1,116 +0,0 @@
-#
-# Author:: Thijs Houtenbos
-# Cookbook:: acme
-# Library:: acme
-#
-# Copyright:: 2015-2021, Schuberg Philis
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-begin
- require 'acme-client'
-rescue LoadError => e
- Chef::Log.warn("Acme library dependency 'acme-client' not loaded: #{e}")
-end
-
-def acme_client
- return @client if @client
-
- # load private_key from disk if present
- private_key_file = node['acme']['private_key_file']
- node.default['acme']['private_key'] = ::File.read(private_key_file) if ::File.exist?(private_key_file)
-
- private_key = OpenSSL::PKey::RSA.new(node['acme']['private_key'] || 2048)
-
- directory = new_resource.dir || node['acme']['dir']
-
- contact = (new_resource.contact.nil? || new_resource.contact.empty?) ? node['acme']['contact'] : new_resource.contact
-
- @client = Acme::Client.new(private_key: private_key, directory: directory)
-
- if node['acme']['private_key'].nil?
- acme_client.new_account(contact: contact, terms_of_service_agreed: true)
- node.default['acme']['private_key'] = private_key.to_pem
-
- # write key to disk for persistence
- directory File.dirname(private_key_file) do
- recursive true
- end
-
- file private_key_file do
- content private_key.to_pem
- mode '600'
- sensitive true
- end
- end
-
- @client
-end
-
-def acme_order_certs_for(names)
- acme_client.new_order(identifiers: names)
-end
-
-def acme_validate(authz)
- authz.request_validation
-
- times = 60
-
- while times > 0
- break unless authz.status == 'pending'
- times -= 1
- sleep 1
- authz.reload
- end
-
- authz
-end
-
-def acme_cert(order, cn, key, alt_names = [])
- csr = Acme::Client::CertificateRequest.new(
- common_name: cn,
- names: alt_names,
- private_key: key
- )
- order.finalize(csr: csr)
-
- while order.status == 'processing'
- sleep 1
- order.reload
- end
-
- order.certificate
-end
-
-def self_signed_cert(cn, alts, key)
- cert = OpenSSL::X509::Certificate.new
- cert.subject = cert.issuer = OpenSSL::X509::Name.new([['CN', cn, OpenSSL::ASN1::UTF8STRING]])
- cert.not_before = Time.now
- cert.not_after = Time.now + 60 * 60 * 24 * node['acme']['renew']
- cert.public_key = key.public_key
- cert.serial = 0x0
- cert.version = 2
-
- ef = OpenSSL::X509::ExtensionFactory.new
- ef.subject_certificate = cert
- ef.issuer_certificate = cert
-
- cert.extensions = []
-
- cert.extensions += [ef.create_extension('basicConstraints', 'CA:FALSE', true)]
- cert.extensions += [ef.create_extension('subjectKeyIdentifier', 'hash')]
- cert.extensions += [ef.create_extension('subjectAltName', alts.map { |d| "DNS:#{d}" }.join(','))] unless alts.empty?
-
- cert.sign key, OpenSSL::Digest.new('SHA256')
-end
diff --git a/lc-gdn-chef/cookbooks/acme/metadata.json b/lc-gdn-chef/cookbooks/acme/metadata.json
deleted file mode 100644
index 4f7812d437aa5844302c385a84268f8c4a23e81c..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/acme/metadata.json
+++ /dev/null
@@ -1,34 +0,0 @@
-{
- "name": "acme",
- "description": "ACME client cookbook for free and trusted SSL/TLS certificates from Let's Encrypt",
- "long_description": "",
- "maintainer": "Thijs Houtenbos",
- "maintainer_email": "thoutenbos@schubergphilis.com",
- "license": "Apache-2.0",
- "platforms": {
- "ubuntu": ">= 0.0.0",
- "debian": ">= 0.0.0",
- "redhat": ">= 0.0.0",
- "centos": ">= 0.0.0",
- "fedora": ">= 0.0.0"
- },
- "dependencies": {
- },
- "providing": {
- },
- "recipes": {
- },
- "version": "4.1.5",
- "source_url": "https://github.com/schubergphilis/chef-acme",
- "issues_url": "https://github.com/schubergphilis/chef-acme/issues",
- "privacy": false,
- "chef_versions": [
- [
- ">= 15.3"
- ]
- ],
- "ohai_versions": [
- ],
- "gems": [
- ]
-}
diff --git a/lc-gdn-chef/cookbooks/acme/metadata.rb b/lc-gdn-chef/cookbooks/acme/metadata.rb
deleted file mode 100644
index e6bd654cc3339e1fcb171e36b6593c8d2d8b33fe..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/acme/metadata.rb
+++ /dev/null
@@ -1,13 +0,0 @@
-name 'acme'
-maintainer 'Thijs Houtenbos'
-maintainer_email 'thoutenbos@schubergphilis.com'
-license 'Apache-2.0'
-description 'ACME client cookbook for free and trusted SSL/TLS certificates from Let\'s Encrypt'
-source_url 'https://github.com/schubergphilis/chef-acme'
-issues_url 'https://github.com/schubergphilis/chef-acme/issues'
-version '4.1.5'
-chef_version '>= 15.3'
-
-%w(ubuntu debian redhat centos fedora).each do |os|
- supports os
-end
diff --git a/lc-gdn-chef/cookbooks/acme/recipes/default.rb b/lc-gdn-chef/cookbooks/acme/recipes/default.rb
deleted file mode 100644
index 5c81dca243d43b05cf648e40f7a99e32ce029937..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/acme/recipes/default.rb
+++ /dev/null
@@ -1,27 +0,0 @@
-#
-# Author:: Thijs Houtenbos
-# Cookbook:: acme
-# Recipe:: default
-#
-# Copyright:: 2015-2021, Schuberg Philis
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-chef_gem 'acme-client' do
- action :install
- version node['acme']['gem_version']
- compile_time true
-end
-
-require 'acme-client'
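
Since the recipe above installs the `acme-client` gem at compile time, a wrapper recipe only needs to include it
before declaring certificate resources. A hedged sketch (the certificate paths are placeholders):

```ruby
# Hypothetical wrapper recipe: install the gem via the default recipe, then request a certificate.
include_recipe 'acme'

acme_certificate node['fqdn'] do
  crt '/etc/ssl/certs/site.crt'
  key '/etc/ssl/private/site.key'
end
```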
diff --git a/lc-gdn-chef/cookbooks/acme/resources/certificate.rb b/lc-gdn-chef/cookbooks/acme/resources/certificate.rb
deleted file mode 100644
index 56874f1fd80012bccff40f474822604eef36e6d3..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/acme/resources/certificate.rb
+++ /dev/null
@@ -1,168 +0,0 @@
-#
-# Author:: Thijs Houtenbos
-# Cookbook:: acme
-# Resource:: certificate
-#
-# Copyright:: 2015-2021, Schuberg Philis
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-unified_mode true
-
-default_action :create
-
-property :cn, String, name_property: true
-property :alt_names, Array, default: []
-
-property :crt, [String, nil], required: true
-property :key, [String, nil], required: true
-
-property :owner, String, default: 'root'
-property :group, String, default: 'root'
-
-property :wwwroot, String, default: '/var/www'
-
-property :key_size, Integer, default: lazy { node['acme']['key_size'] }, equal_to: [2048, 3072, 4096]
-
-property :dir, [String, nil]
-property :contact, Array, default: []
-
-# if you want to use DNS authentication, you can pass the code to install and
-# remove the challenge as a block
-#
-# the install_authz_block will be called for each authorization with the
-# authorization and resource as parameter. It must return the authz object from
-# the authorization.
-# The resource will then call the acme verification process. After verification
-# the remove_authz_block will be called with the authz as parameter. This is
-# intended to allow cleanup of the challenge
-property :install_authz_block, [Proc, nil]
-property :remove_authz_block, [Proc, nil]
-
-property :chain, String, deprecated: 'The chain property has been deprecated as the acme-client gem now returns the full certificate chain by default (on the crt property.) Please update your cookbooks to remove this property.'
-deprecated_property_alias 'fullchain', 'crt', 'The fullchain property has been deprecated as the acme-client gem now returns the full certificate chain by default (on the crt property.) Please update your cookbooks to switch to \'crt\'.'
-
-deprecated_property_alias 'endpoint', 'dir', 'The endpoint property was renamed to dir, to reflect ACME v2 changes. Please update your cookbooks to use the new property name.'
-
-def names_changed?(cert, names)
- return false if names.empty?
-
- san_extension = cert.extensions.find { |e| e.oid == 'subjectAltName' }
- return false if san_extension.nil?
-
- current = san_extension.value.split(', ').select { |v| v.start_with?('DNS:') }.map { |v| v.split(':')[1] }
- !(names - current).empty? || !(current - names).empty?
-end
-
-action :create do
- file "#{new_resource.cn} SSL key" do
- path new_resource.key
- owner new_resource.owner
- group new_resource.group
- mode '400'
- content OpenSSL::PKey::RSA.new(new_resource.key_size).to_pem
- sensitive true
- action :nothing
- end.run_action(:create_if_missing)
-
- mycert = nil
- mykey = OpenSSL::PKey::RSA.new ::File.read new_resource.key
- names = [new_resource.cn, new_resource.alt_names].flatten.compact
- renew_at = ::Time.now + 60 * 60 * 24 * node['acme']['renew']
-
- if !new_resource.crt.nil? && ::File.exist?(new_resource.crt)
- mycert = ::OpenSSL::X509::Certificate.new ::File.read new_resource.crt
- end
-
- if mycert.nil? || mycert.not_after <= renew_at || names_changed?(mycert, names)
- order = acme_order_certs_for(names)
- all_validations = []
- if new_resource.install_authz_block.nil?
- order.authorizations.each do |authorization|
- authz = install_http_validation(authorization, new_resource)
- acme_validate(authz)
- remove_http_validation(authz, new_resource)
- all_validations.push(authz)
- end
- else
- ruby_block 'install and validate challenges using custom method' do
- block do
- order.authorizations.each do |authorization|
- authz, fqdn = new_resource.install_authz_block.call(authorization, new_resource)
- acme_validate(authz)
- new_resource.remove_authz_block.call(authz, fqdn)
- end
- end
- end
- end
-
- ruby_block "create certificate for #{new_resource.cn}" do
- block do
- unless (all_validations.map { |authz| authz.status == 'valid' }).all?
- errors = all_validations.select { |authz| authz.status != 'valid' }.map do |authz|
- "{url: #{authz.url}, status: #{authz.status}, error: #{authz.error}} "
- end.reduce(:+)
-
- raise "[#{new_resource.cn}] Validation failed, unable to request certificate, Errors: [#{errors}]"
- end
-
- begin
- newcert = acme_cert(order, new_resource.cn, mykey, new_resource.alt_names)
- rescue Acme::Client::Error => e
- raise "[#{new_resource.cn}] Certificate request failed: #{e.message}"
- else
- Chef::Resource::File.new("#{new_resource.cn} SSL new crt", run_context).tap do |f|
- f.path new_resource.crt
- f.owner new_resource.owner
- f.group new_resource.group
- f.content newcert
- f.mode 00644
- end.run_action :create
- end
- end
- end
- end
-end
-
-action_class.class_eval do
- def install_http_validation(authorization, new_resource)
- authz = authorization.http
- tokenpath = "#{new_resource.wwwroot}/#{authz.filename}"
-
- directory ::File.dirname(tokenpath) do
- owner new_resource.owner
- group new_resource.group
- mode '755'
- recursive true
- action :nothing
- end.run_action(:create)
-
- file tokenpath do
- owner new_resource.owner
- group new_resource.group
- mode '644'
- content authz.file_content
- action :nothing
- end.run_action(:create)
- authz
- end
-
- def remove_http_validation(authz, new_resource)
- tokenpath = "#{new_resource.wwwroot}/#{authz.filename}"
- file tokenpath do
- backup false
- action :delete
- end
- end
-end
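
The `install_authz_block` / `remove_authz_block` contract described in the property comments above (the install block
receives the authorization and the resource and returns the challenge object plus an FQDN; the remove block receives
both after validation) can be sketched as follows. `publish_txt` and `remove_txt` are hypothetical stand-ins for a
provider-specific DNS API, and the `authorization.dns` accessors are assumed to be those of the acme-client gem:

```ruby
# Hypothetical DNS-01 challenge blocks for the certificate resource above.
install_dns = proc do |authorization, resource|
  challenge = authorization.dns
  fqdn = "#{challenge.record_name}.#{resource.cn}"
  publish_txt(fqdn, challenge.record_content) # e.g. _acme-challenge.example.com TXT <token>
  [challenge, fqdn]                           # returned for validation and later cleanup
end

remove_dns = proc do |challenge, fqdn|
  remove_txt(fqdn)                            # remove the TXT record after validation
end

acme_certificate 'example.com' do
  crt '/etc/ssl/certs/example.com.crt'
  key '/etc/ssl/private/example.com.key'
  install_authz_block install_dns
  remove_authz_block  remove_dns
end
```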
diff --git a/lc-gdn-chef/cookbooks/acme/resources/selfsigned.rb b/lc-gdn-chef/cookbooks/acme/resources/selfsigned.rb
deleted file mode 100644
index f77f04772cd25e628e4bd7421ae2c7e4c32db9af..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/acme/resources/selfsigned.rb
+++ /dev/null
@@ -1,67 +0,0 @@
-#
-# Author:: Thijs Houtenbos
-# Cookbook:: acme
-# Resource:: selfsigned
-#
-# Copyright:: 2015-2021, Schuberg Philis
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-unified_mode true
-
-default_action :create
-
-property :cn, String, name_property: true
-property :alt_names, Array, default: []
-
-property :crt, [String, nil], required: true
-property :key, [String, nil], required: true
-
-property :chain, [String, nil]
-
-property :owner, String, default: 'root'
-property :group, String, default: 'root'
-
-property :key_size, Integer, default: lazy { node['acme']['key_size'] }, equal_to: [2048, 3072, 4096]
-
-action :create do
- file "#{new_resource.cn} SSL selfsigned key" do
- path new_resource.key
- owner new_resource.owner
- group new_resource.group
- mode '400'
- content OpenSSL::PKey::RSA.new(new_resource.key_size).to_pem
- sensitive true
- action :create_if_missing
- end
-
- file "#{new_resource.cn} SSL selfsigned crt" do
- path new_resource.crt
- owner new_resource.owner
- group new_resource.group
- mode '644'
- content lazy { self_signed_cert(new_resource.cn, new_resource.alt_names, OpenSSL::PKey::RSA.new(::File.read(new_resource.key))).to_pem }
- action :create_if_missing
- end
-
- file "#{new_resource.cn} SSL selfsigned chain" do
- path new_resource.chain unless new_resource.chain.nil?
- owner new_resource.owner
- group new_resource.group
- mode '644'
- content lazy { self_signed_cert(new_resource.cn, new_resource.alt_names, OpenSSL::PKey::RSA.new(::File.read(new_resource.key))).to_pem }
- not_if { new_resource.chain.nil? }
- action :create_if_missing
- end
-end
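
A hedged usage sketch for the selfsigned resource above; the `acme_selfsigned` name follows the cookbook's
resource-naming convention and the paths are placeholders. The chain file simply duplicates the self-signed
certificate, so it is optional:

```ruby
# Hypothetical usage: generate a temporary self-signed certificate until ACME issuance succeeds.
acme_selfsigned 'example.com' do
  crt   '/etc/ssl/certs/example.com.crt'
  key   '/etc/ssl/private/example.com.key'
  chain '/etc/ssl/certs/example.com-chain.crt' # optional
  owner 'www-data'
  group 'www-data'
end
```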
diff --git a/lc-gdn-chef/cookbooks/acme/resources/ssl_key.rb b/lc-gdn-chef/cookbooks/acme/resources/ssl_key.rb
deleted file mode 100644
index f80036f5db049576776223e528c2779f618ed2ea..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/acme/resources/ssl_key.rb
+++ /dev/null
@@ -1,61 +0,0 @@
-#
-# Author:: Thijs Houtenbos
-# Cookbook:: acme
-# Resource:: ssl_key
-#
-# Copyright:: 2015-2021, Schuberg Philis
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-unified_mode true
-
-default_action :create_if_missing
-
-property :path, String, name_property: true
-property :length, Integer, default: 2048
-property :output_format, Symbol, equal_to: [:pem, :der, :text], default: :pem
-property :type, Symbol, equal_to: [:rsa, :dsa], default: :rsa
-
-def load
- klass = OpenSSL::PKey.const_get(type.upcase)
- klass.new(::File.read(path)) if ::File.exist?(path)
-end
-
-def do_action(file_action)
- klass = OpenSSL::PKey.const_get(new_resource.type.upcase)
- key = klass.new(new_resource.length)
- data = key.send("to_#{new_resource.output_format}".to_sym)
-
- file new_resource.path do
- owner owner
- group group
- mode '400'
- content data
- sensitive true
-
- action file_action
- end
-end
-
-action :create do
- do_action(:create)
-end
-
-action :destroy do
- do_action(:destroy)
-end
-
-action :create_if_missing do
- do_action(:create_if_missing)
-end
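
A minimal sketch of using the ssl_key resource above, assuming the conventional `acme_ssl_key` resource name; the path
is a placeholder. The default action is `:create_if_missing`, so an existing key is left untouched:

```ruby
# Hypothetical usage: create a 4096-bit RSA key only if it does not already exist.
acme_ssl_key '/etc/ssl/private/example.com.key' do
  length 4096
  type :rsa
  output_format :pem
end
```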
diff --git a/lc-gdn-chef/cookbooks/acme/templates/acme-challange.nginx.erb b/lc-gdn-chef/cookbooks/acme/templates/acme-challange.nginx.erb
deleted file mode 100644
index 9f8e9ca12271888605400d4ced1b1c83499656f5..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/acme/templates/acme-challange.nginx.erb
+++ /dev/null
@@ -1,8 +0,0 @@
-server {
- listen 443 ssl;
- server_name <%= @host %>;
- ssl_certificate <%= @cert %>;
- ssl_certificate_key <%= @key %>;
-
- return 202;
-}
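
The ERB template above expects `@host`, `@cert`, and `@key`. A hedged sketch of rendering it from a recipe; the
destination path and certificate paths are placeholders, and a `service[nginx]` resource is assumed to exist
elsewhere in the run list:

```ruby
# Hypothetical rendering of the template above; variable names match the ERB.
template '/etc/nginx/conf.d/acme-challange.conf' do
  source   'acme-challange.nginx.erb'
  cookbook 'acme'
  variables(
    host: node['fqdn'],
    cert: '/etc/ssl/certs/example.com.crt',
    key:  '/etc/ssl/private/example.com.key'
  )
  notifies :reload, 'service[nginx]', :delayed # assumes service[nginx] is declared elsewhere
end
```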
diff --git a/lc-gdn-chef/cookbooks/apache2/CHANGELOG-pre4.md b/lc-gdn-chef/cookbooks/apache2/CHANGELOG-pre4.md
deleted file mode 100644
index 33209568ab560c8845a5cafbf10d78274b48f0fd..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/CHANGELOG-pre4.md
+++ /dev/null
@@ -1,493 +0,0 @@
-# CHANGELOG-pre4.0
-
-This document describes the changes before the 4.0 release.
-
-## v3.3.1 (2017-07-06)
-
-- [GH-489] Fix OpenSuse service guard
-
-## v3.3.0 (2017-04-11)
-
-- [GH-478] Added support for the amazon platform_family, outside of RHEL
-- [GH-474] Update Berksfile to allow fetching of newer
-- [GH-473] Update copyright header format
-- [GH-472] foodcritic: add sous-chefs rules
-- add CODE_OF_CONDUCT
-- [GH-471] FCGI paths should not be messed with on RHEL/CentOS 7. CentOS 7 (and recent Fedoras) have Apache 2.4, where
- FCGI socket path and shared memory path is managed adequately without further involvement necessary (subdirectory is
- created under /var/run/httpd).
-- [GH-470] Remove support for EOL Fedora < 18 / FreeBSD 9
-- [GH-465] Testing updates
-- [GH-469] Use the default cookbook style rules
-- [GH-460] ServerSpec to InSpec migration
-- [GH-461] Update comment header format & other Cookstyle fixes
-- [GH-454] Test in Travis with Chef-DK and a Rakefile
-- [GH-455] openSUSE Leap has its own platform name
-- [GH-279] leave stubs for rhel family `conf.d` files to avoid conflicts on package upgrade; no longer remove `conf.d`,
- just don't use it
-- [GH-427] Add option to configure custom log level
-- [GH-450] Ensure the lock_dir is owned by www-data for Apache 2.2 and 2.4 on Debian/Ubuntu
-- Remove mod_auth_openid tests: it is not part of the ASF release, we plan to drop support for it, and it is currently
-  failing our tests
-- [GH-440] Update default values in `apache.prefork` section of README
-- [GH-443] fixed typo in copyright year
-- Test on the latest chef with chef-zero
-- Update supported platforms to Ubuntu 16.04, Debian 8.4, CentOS 7.2; deprecating Ubuntu 12.04
-- [GH-422] Fix uniq for nil:NilClass error introduced in 3.2.2
-- [GH-423] allow for apache 2.4 usage on RHEL < 7.0
-- Cookbook is now part of the sous-chefs, but still maintained by the same folks
-- mod_perl: No longer install libapache2-mpm-prefork
-- mod_php: renamed mod_php5 to more generic mod_php; using php 7.0 where available
-
-## v3.2.2 (2016-04-13)
-
-- [GH-420] Allow auto-conversion if either `apache.listen_ports` or `apache.listen_addresses` is set, rather than
-  requiring both. This ensures conversion occurs if only one of the two is set.
-
-## v3.2.1 (2016-04-11)
-
-- [GH-225] notify `restart` instead of `reload` service on `apache_conf`, `apache_config`
-- Update to foodcritic 6
-
-## v3.2.0 (2016-03-26)
-
-- [GH-378] Deprecates `apache.listen_addresses` and `apache.listen_ports` in favor of [GH-409]
-- [GH-409] `apache.listen` now accepts an array of `addr:port` strings
-- [GH-358] FreeBSD: Update 10.1 support; Adds php 5.6 in collaboration with chef-cookbooks/php#119
-- [GH-394] Have `apache.prefork.serverlimit` set ServerLimit directive on 2.4
-- [GH-363] Escape '.' in regex for .htaccess/.htpasswd files
-- [GH-365] Force log directory creation to be recursive
-- [GH-368] Change the service creation to use the `apache.service_name` attribute throughout
-- [GH-374] Make metadata.rb compatible with chef versions < 12.
-- [GH-382] Fixed typo in node['platform_family'] for NameError in `mod_proxy_html`
-- [GH-369] README: Note that on Ubuntu, `mod_fastcgi` requires the `multiverse` apt repository to be enabled.
-- [GH-381] README: Add missing backtick
-- [GH-384] README: Fix names for a2enconf and a2disconf
-- [GH-393] README: mention availability of `mod_actions` support
-- [GH-383] Debian: Add possibility to use other releases via `apache.default_release`
-- [GH-377] Restart service when including `mod_headers` to allow healing of failed service because of missing
- directives.
-- [GH-416] Change the default of `apache.mod_fastcgi.install_method` to 'package' on all platforms, as `source` is no
- longer available.
-- [GH-401] Move `mod_deflate` to `apache.default_modules` and no longer force installation on `debian` families.
-- [GH-386] Do not install an extra mod_ssl package on SUSE Linux Enterprise
-- [GH-335] Do not hardcode reload/restart on more modern rhel platforms, allowing systemd on CentOS 7
-- [GH-375] Install package `mod_ldap` on CentOS 7 (triggered by `apache.version` == 2.4)
-- Update `apache.mod_ssl.cipher_suite` to latest from
-- README: Re-organize README to make it easier to find usage and remove old references.
-- Added new standard and missing modules (Note: these may not be available natively on all operating systems)
-
- - [mod_http2](http://httpd.apache.org/docs/2.4/mod/mod_http2.html) - Support for the HTTP/2 transport layer. (
- available since 2.4.17)
- - [mod_authnz_fcgi](http://httpd.apache.org/docs/2.4/mod/mod_authnz_fcgi.html) - Enable FastCGI authorizer
- applications to authenticate and/or authorize clients. (available since 2.4.10)
- - [mod_cern_meta](http://httpd.apache.org/docs/2.4/mod/mod_cern_meta.html) - CERN httpd metafile semantics
- - [mod_ident](http://httpd.apache.org/docs/2.4/mod/mod_ident.html) - RFC 1413 ident lookups
- - [mod_privileges](http://httpd.apache.org/docs/2.4/mod/mod_privileges.html) - Support for Solaris privileges and
- for running virtual hosts under different user IDs.
- - [mod_socache_dc](http://httpd.apache.org/docs/2.4/mod/mod_socache_dc.html) - Distcache based shared object cache
- provider.
- - [mod_version](http://httpd.apache.org/docs/2.4/mod/mod_version.html) - Version dependent configuration
- - [mod_watchdog](http://httpd.apache.org/docs/2.4/mod/mod_watchdog.html) - Provides infrastructure for other modules
- to periodically run tasks
-
-## v3.1.0 (2015-05-25)
-
-- [GH-315] Fix `apache.default_site_name` .conf extension references to ensure deletion
-- [GH-258] Use `apache.default_site_name` for consistency, minimize hardcoding of filenames
-- [GH-259] Add `&& sleep 1` to end of apache restart command on rhel-based systems using apache2.2
-- [GH-271] Remove FreeBSD 9.x, Red Hat and CentOS 5.x and OpenSUSE 11.x Series from tests and focus on newer releases
-- [GH-276] Add psych gem to development gems
-- [GH-293] Add `apache.mod_fastcgi.install_method` flag to allow install of mod_fastcgi from source (even on Debian
- family)
-- [GH-285] Made `apache.devel_package` configurable based on platform, including support for Amazon Linux.
-- [GH-316] Update Opscode references to Chef
-- [GH-318] Apply default recipe in all definitions
-- [GH-320] Add attribute to adjust `apache.default_site_port`
-- [GH-321] Fix issue with default_site name in not_if guards
-- [GH-322] Add `apache.mod_ssl.pkg_name` to allow custom mod_ssl package names. Set defaults for supported platforms
- including Amazon Linux
-- [GH-323] Don't create the default site configuration file in `sites-available` unless it is enabled.
-- [GH-324] Add `apache.mod_ssl.port` to set the default ssl port to something other than 443
-- [GH-328] Add the ability to pass in a pipe as to log
-- [GH-332] `SSLStrictSNIVHostCheck` is only written to config if enabled to avoid breaking apache prior to 2.2.12.
-- [GH-334] Removed `iptables`, `god-monitor`, and `logrotate` recipes to avoid having external dependencies. These
- services should be managed in a wrapper cookbook going forward.
-- [GH-339] Allow custom names for php so_filename (`node['apache']['mod_php5']['so_filename']`)
-
-## v3.0.1 (2015-02-11)
-
-- [GH-310] Ubuntu Apache 2.2 requires the lock_dir to be owned by www-data
-- [GH-309] Clarify that apache.version is a string
-- [GH-305] Restart service after MPM changes
-- [GH-304] Don't install systemd module on Amazon Linux
-- [GH-298] Add non-threaded MPM break notice for PHP users
-- [GH-296] Create lock_dir automatically
-
-## v3.0.0 (2014-11-30)
-
-Major version update because of SSL Improvements and new platform MPM and Version defaults.
-
-- [GH-286] Refactor MPM and Apache version defaults: default is now apache 2.4
-- Note: set `apache.mpm` to `prefork` if you are using `mod_php` in Ubuntu >=14.04
-- [GH-281] mod_ssl: Disable SSLv3 by default to protect against POODLE attack (CVE-2014-3566)
-- [GH-280] mod_ssl: Major update with modern Cipher Suite, and best practices. Updated to a more modern
- default `apache.mod_ssl.cipher_suite`. Added the following additional mod_ssl attributes
-
- - `apache.mod_ssl.honor_cipher_order`
- - `apache.mod_ssl.insecure_renegotiation`
- - `apache.mod_ssl.strict_sni_vhost_check`
- - `apache.mod_ssl.session_cache_timeout`
- - `apache.mod_ssl.compression`
- - `apache.mod_ssl.use_stapling`
- - `apache.mod_ssl.stapling_responder_timeout`
- - `apache.mod_ssl.stapling_return_responder_errors`
- - `apache.mod_ssl.stapling_cache`
- - `apache.mod_ssl.pass_phrase_dialog`
- - `apache.mod_ssl.mutex`
- - `apache.mod_ssl.directives`
-
-- [GH-278] Improved chefspec tests execution time
-- [GH-277] Optimize files watching for Guard on Win32 platform
-- [GH-270] Don't attempt start until after configuration is written
-- [GH-268] Now uses chefspec 4.1
-- [GH-267] Use Supermarket as the Berkshelf 3 source
-- [GH-266] Rubocop based ruby style/syntax improvements
-- [GH-264] mod_ssl: Add a new attribute to allow arbitrary custom directives
-- [GH-249] Don't prepend Apache log path when requesting error logging to syslog
-- [GH-247] Explicitly include mod_ldap before mod_authnz_ldap
-- [GH-243] Expand mpm options for different distros/versions.
-- [GH-239] Added `apache.mod_php5.install_method` attribute defaults to `package`. Install packages unless PHP is
- compiled from source.
-- OneHealth Solutions was acquired by Viverae
-- Remove ArchLinux pacman as a dependency and handle similar to apt, yum, zypper
-- Adjust ubuntu apache 2.4 docroot_dir to match package (from /var/www to /var/www/html)
-- [GH-238] Bump service config syntax check guard timeout to 10 seconds
-- [GH-235] Removed `apache2::mpm_itk` which is not part of core and therefore should be its own cookbook
-- [GH-234] /var/run/httpd/mod_fcgid directory now belongs to apache on Fedora/RHEL systems.
-- [GH-233] Default web_app template should return 503 status code when maintenance file is present
-- [GH-232] Cookbook now deletes a2* if they are symlinks before dropping template versions
-- [GH-222] Set TraceEnable to off by default.
-- [GH-213] Adjust chefspec to use the package resource on FreeBSD (previously freebsd_package)
-- [GH-212] New attribute apache.locale which sets LANG. defaults to 'C'
-- [GH-210] Clarify web_app definition usage around configuration templates.
-- [GH-208] `apache_conf` now accepts `source` and `cookbook` parameters.
-
-## v2.0.0 (2014-08-06)
-
-Major version update because of major overhaul to support Apache 2.4 and a2enconf and a2endisconf changes.
-
-- [GH-204] mod_auth_openid: Added `apache.mod_auth_openid.version` attribute
-- FreeBSD support has been improved with the release of chef 11.14.2, portsnap is no longer used in favor of pkgng.
-- [GH-157] - Apache will only be started when a configuration test passes; this allows the chef run to fix any broken
-  configuration without failing the run.
-- `apache.log_dir` directory is now 0755 on all platforms (including the debian platform family)
-- [GH-166, GH-173] - `conf.d` is no longer used and replaced by `conf-available` and `conf-enabled` managed via
- the `a2enconf` and `a2disconf` scripts
-- [GH-166, GH-173] - All configuration files need to end in `.conf` for them to be loaded
-- [GH-173] - Perl is a required package on all platforms to support the a2* scripts as we now use the debian versions
- directly.
-- [GH-193] - per MPM settings: `maxclients` is now `maxrequestworkers`
-- [GH-194] - per MPM settings: `maxrequestsperchild` is now `maxconnectionsperchild`
-- [GH-161] - Added support for CentOS 7
-- [GH-180] - Improved SuSE support
-- [GH-100] - Apache HTTP 2.4 support This provides Apache 2.4 support in a backwards compatible way. It adds the
- following new attributes:
-
- - `apache.version` - This defaults to `2.2`; if changed to `2.4`, it triggers 2.4 behaviour and assumes 2.4 packages
-   will be installed.
- - `apache.mpm` - In 2.4 mode, this specifies which mpm to install. Default is `prefork`.
- - `apache.run_dir`
- - `apache.lock_dir`
- - `apache.libexec_dir` replaces `apache.libexecdir`
- - `apache.prefork.maxrequestworkers` replaces `apache.prefork.maxclients`
- - `apache.prefork.maxconnectionsperchild` replaces `apache.prefork.maxrequestsperchild`
- - `apache.worker.threadlimit`
- - `apache.worker.maxrequestworkers` replaces `apache.worker.maxclients`
- - `apache.worker.maxconnectionsperchild`replaces `apache.worker.maxrequestsperchild`
- - `apache.event.startservers`
- - `apache.event.serverlimit`
- - `apache.event.minsparethreads`
- - `apache.event.maxsparethreads`
- - `apache.event.threadlimit`
- - `apache.event.threadsperchild`
- - `apache.event.maxrequestworkers`
- - `apache.event.maxconnectionsperchild`
- - `apache.itk.startservers`
- - `apache.itk.minspareservers`
- - `apache.itk.maxspareservers`
- - `apache.itk.maxrequestworkers`
- - `apache.itk.maxconnectionsperchild`
-
- Apache 2.4 Upgrade Notes:
-
- Since the changes between apache 2.2 and apache 2.4 are pretty significant, we are unable to account for all changes
- needed for your upgrade. Please take a moment to familiarize yourself with the Apache Software Foundation provided
- upgrade documentation before attempting to use this cookbook with apache 2.4.
- See
-
- - This cookbook does not automatically specify which version of apache to install. We are at the mercy of
- the `package` provider. It is important, however, to make sure that you configure the `apache.version` attribute
- to match. For your convenience, we try to set reasonable defaults based on different platforms in our test suite.
- - `mod_proxy` - In 2.4 mode, `apache.proxy.order`, `apache.proxy.deny_from`, `apache.proxy.allow_from` are ignored,
- as the attributes can not be supported in a backwards compatible way. Please use `apache.proxy.require` instead.
-
-## v1.11.0 (2014-07-25)
-
-- [GH-152] - Checking if server_aliases is defined in example
-- [GH-106] - Only turn rewrite on once in web_app.conf.erb
-- [GH-156] - Correct mod_basic/digest recipe names in README
-- Recipe iptables now includes the iptables::default recipe
-- Upgrade test-kitchen to latest version
-- Replaced minitest integration tests with serverspec tests
-- Added chefspec tests
-
-## v1.10.4 (2014-04-23)
-
-- [COOK-4249] mod_proxy_http requires mod_proxy
-
-## v1.10.2 (2014-04-09)
-
-- [COOK-4490] - Fix minitest `apache_configured_ports` helper
-- [COOK-4491] - Fix minitest: escape regex interpolation
-- [COOK-4492] - Fix service[apache2] CHEF-3694 duplication
-- [COOK-4493] - Fix template[ports.conf] CHEF-3694 duplication
-
-As of 2014-04-04 and
-per [Community Cookbook Diversification](https://wiki.chef.io/display/chef/Community+Cookbook+Diversification) this
-cookbook is now maintained by OneHealth Solutions. Please be patient as we get into the swing of things.
-
-## v1.10.0 (2014-03-28)
-
-- [COOK-3990] - Fix minitest failures on EL5
-- [COOK-4416] - Support the ability to point to local apache configs
-- [COOK-4469] - Use reload instead of restart on RHEL
-
-## v1.9.6 (2014-02-28)
-
-[COOK-4391] - uncommenting the PIDFILE line
-
-## v1.9.4 (2014-02-27)
-
-Bumping version for toolchain
-
-## v1.9.1 (2014-02-27)
-
-[COOK-4348] Allow arbitrary params in sysconfig
-
-## v1.9.0 (2014-02-21)
-
-- **[COOK-4076](https://tickets.chef.io/browse/COOK-4076)** - foodcritic: dependencies are not defined properly
-- **[COOK-2572](https://tickets.chef.io/browse/COOK-2572)** - Add mod_pagespeed recipe to apache2
-- **[COOK-4043](https://tickets.chef.io/browse/COOK-4043)** - apache2 cookbook does not depend on 'iptables'
-- **[COOK-3919](https://tickets.chef.io/browse/COOK-3919)** - Move the default pidfile for apache2 on Ubuntu 13.10 or
- greater
-- **[COOK-3863](https://tickets.chef.io/browse/COOK-3863)** - Add recipe for mod_jk
-- **[COOK-3804](https://tickets.chef.io/browse/COOK-3804)** - Fix incorrect datatype for apache/default_modules, use
- recipes option in metadata
-- **[COOK-3800](https://tickets.chef.io/browse/COOK-3800)** - Cannot load modules that use non-standard module
- identifiers
-- **[COOK-1689](https://tickets.chef.io/browse/COOK-1689)** - The perl package name should be configurable
-
-## v1.8.14
-
-Version bump for toolchain sanity
-
-## v1.8.12
-
-Fixing various style issues for travis
-
-## v1.8.10
-
-Fixing metadata version error; locking to 3.0.
-
-## v1.8.8
-
-Version bump for toolchain sanity
-
-## v1.8.6
-
-Locking yum dependency to '< 3'
-
-## v1.8.4
-
-- **[COOK-3769](https://tickets.chef.io/browse/COOK-3769)** - Fix a critical bug where the `apache_module` could not
- enable modules
-
-## v1.8.2
-
-- **[COOK-3766](https://tickets.chef.io/browse/COOK-3766)** - Fix an issue where the `mod_ssl` recipe fails due to a
- missing attribute
-
-## v1.8.0
-
-- **[COOK-3680](https://tickets.chef.io/browse/COOK-3680)** - Update template paths
-- **[COOK-3570](https://tickets.chef.io/browse/COOK-3570)** - Apache cookbook breaks on RHEL / CentOS 6
-- **[COOK-2944](https://tickets.chef.io/browse/COOK-2944)** - Fix foodcritic failures
-- **[COOK-2893](https://tickets.chef.io/browse/COOK-2893)** - Improve mod_auth_openid recipe with guards and idempotency
-- **[COOK-2758](https://tickets.chef.io/browse/COOK-2758)** - Fix use of non-existent attribute
-- **[COOK-3665](https://tickets.chef.io/browse/COOK-3665)** - Add recipe for mod_userdir
-- **[COOK-3646](https://tickets.chef.io/browse/COOK-3646)** - Add recipe for mod_cloudflare
-- **[COOK-3213](https://tickets.chef.io/browse/COOK-3213)** - Add recipe for mod_info
-- **[COOK-3656](https://tickets.chef.io/browse/COOK-3656)** - Parameterize apache2 binary
-- **[COOK-3562](https://tickets.chef.io/browse/COOK-3562)** - Allow mod_proxy settings to be configured as attributes
-- **[COOK-3326](https://tickets.chef.io/browse/COOK-3326)** - Fix default_test to use ServerTokens attribute
-- **[COOK-2635](https://tickets.chef.io/browse/COOK-2635)** - Add support for SVG mime types
-- **[COOK-2598](https://tickets.chef.io/browse/COOK-2598)** - FastCGI Module only works on Debian-based platforms
-- **[COOK-1984](https://tickets.chef.io/browse/COOK-1984)** - Add option to configure the address apache listens to
-
-## v1.7.0
-
-- [COOK-3073]: make access.log location configurable per-platform
-- [COOK-3074]: don't hardcode the error.log location in the default site config
-- [COOK-3268]: don't hardcode DocumentRoot and cgi-bin locations in `default_site`
-- [COOK-3184]: Add `mod_filter` recipe to Apache2-cookbook
-- [COOK-3236]: Add `mod_action` recipe to Apache2-cookbook
-
-## v1.6.6
-
-1.6.4 had a missed step in the automated release, long live 1.6.6.
-
-- [COOK-3018]: apache2_module does duplicate delayed restart of apache2 service when conf = true
-- [COOK-3027]: Default site enable true, then false, does not disable default site
-- [COOK-3109]: fix apache lib_dir arch attribute regexp
-
-## v1.6.2
-
-- [COOK-2535] - `mod_auth_openid` requires libtool to run autogen.sh
-- [COOK-2667] - Typo in usage documentation
-- [COOK-2461] - `apache2::mod_auth_openid` fails on some ubuntu systems
-- [COOK-2720] - Apache2 minitest helper function `ran_recipe` is not portable
-
-## v1.6.0
-
-- [COOK-2372] - apache2 mpm_worker: add ServerLimit attribute (default to 16)
-
-## v1.5.0
-
-The `mod_auth_openid` attributes are changed. The upstream maintainer deprecated the older release versions, and the
-source repository has releases available at specific SHA1SUM references. The new
-attribute, `node['apache']['mod_auth_openid']['ref']` is used to set this.
-
-- [COOK-2198] - `apache::mod_auth_openid` compiles from source, but does not install make on debian/ubuntu
-- [COOK-2224] - version conflict between cucumber and other gems
-- [COOK-2248] - `apache2::mod_php5` uses `not_if` "which php" without ensuring package 'which' is installed
-- [COOK-2269] - Set allow list for mod_status in case external monitoring scripts need it
-- [COOK-2276] - cookbook apache2 documentation regarding listening ports doesn't match default attributes
-- [COOK-2296] - `mod_auth_openid` doesn't have tags/releases for the version I need for features and fixes
-- [COOK-2323] - Add Oracle linux support
-
-## v1.4.2
-
-- [COOK-1721] - fix logrotate recipe
-
-## v1.4.0
-
-- [COOK-1456] - iptables enhancements
-- [COOK-1473] - apache2 does not disable default site when setting "`default_site_enabled`" back to false
-- [COOK-1824] - the apache2 cookbook needs to specify which binary is used on rhel platform
-- [COOK-1916] - Download location wrong for apache2 `mod_auth_openid` >= 0.7
-- [COOK-1917] - Improve `mod_auth_openid` recipe to handle module upgrade more gracefully
-- [COOK-2029] - apache2 restarts on every run on RHEL and friends, generate-module-list on every run.
-- [COOK-2036] - apache2: Cookbook style
-
-## v1.3.2
-
-- [COOK-1804] - fix `web_app` definition parameter so site can be disabled.
-
-## v1.3.0
-
-- [COOK-1738] - Better configuration for `mod_include` and some overrides in `web_app` definition
-- [COOK-1470] - Change SSL Ciphers to Mitigate BEAST attack
-
-## v1.2.0
-
-- [COOK-692] - delete package conf.d files in module recipes, for EL
-- [COOK-1693] - Foodcritic finding for unnecessary string interpolation
-- [COOK-1757] - platform_family and better style / usage practices
-
-## v1.1.16
-
-re-releasing as .16 due to error on tag 1.1.14
-
-- [COOK-1466] - add `mod_auth_cas` recipe
-- [COOK-1609] - apache2 changes ports.conf twice per run when using apache2::mod_ssl
-
-## v1.1.12
-
-- [COOK-1436] - restore apache2 web_app definition
-- [COOK-1356] - allow ExtendedStatus via attribute
-- [COOK-1403] - add mod_fastcgi recipe
-
-## v1.1.10
-
-- [COOK-1315] - allow the default site to not be enabled
-- [COOK-1328] - cookbook tests (minitest, cucumber)
-
-## v1.1.8
-
-- Some platforms with minimal installations that don't have perl won't have a `node['languages']['perl']` attribute, so
- remove the conditional and rely on the power of idempotence in the package resource.
-- [COOK-1214] - address foodcritic warnings
-- [COOK-1180] - add `mod_logio` and fix `mod_proxy`
-
-## v1.1.6
-
-FreeBSD users: This release requires the `freebsd` cookbook. See README.md.
-
-- [COOK-1025] - freebsd support in mod_php5 recipe
-
-## v1.1.4
-
-- [COOK-1100] - support amazon linux
-
-## v1.1.2
-
-- [COOK-996] - apache2::mod_php5 can cause PHP and module API mismatches
-- [COOK-1083] - return string for v_f_p and use correct value for default
-
-## v1.1.0
-
-- [COOK-861] - Add `mod_perl` and apreq2
-- [COOK-941] - fix `mod_auth_openid` on FreeBSD
-- [COOK-1021] - add a commented-out LoadModule directive to keep apxs happy
-- [COOK-1022] - consistency for icondir attribute
-- [COOK-1023] - fix platform test for attributes
-- [COOK-1024] - fix a2enmod script so it runs cleanly on !bash
-- [COOK-1026] - fix `error_log` location on FreeBSD
-
-## v1.0.8
-
-- COOK-548 - directory resource doesn't have backup parameter
-
-## v1.0.6
-
-- COOK-915 - update to `mod_auth_openid` version 0.6, see **Recipes/mod_auth_openid** below.
-- COOK-548 - Add support for FreeBSD.
-
-## v1.0.4
-
-- COOK-859 - don't hardcode module paths
-
-## v1.0.2
-
-- Tickets resolved in this release: COOK-788, COOK-782, COOK-780
-
-## v1.0.0
-
-- Red Hat family support is greatly improved, all recipes except `god_monitor` converge.
-- Recipe `mod_auth_openid` now works on RHEL family distros
-- Recipe `mod_php5` will now remove config from package on RHEL family so it doesn't conflict with the cookbook's.
-- Added `php5.conf.erb` template for `mod_php5` recipe.
-- Create the run state directory for `mod_fcgid` to prevent a startup error on RHEL version 6.
-- New attribute `node['apache']['lib_dir']` to handle lib vs lib64 on RHEL family distributions.
-- New attribute `node['apache']['group']`.
-- Scientific Linux support added.
-- Use a file resource instead of the generate-module-list executed perl script on RHEL family.
-- "default" site can now be disabled.
-- web_app now has an "enable" parameter.
-- Support for dav_fs apache module.
-- Tickets resolved in this release: COOK-754, COOK-753, COOK-665, COOK-624, COOK-579, COOK-519, COOK-518
-- Fix node references in template for a2dissite
-- Use proper user and group attributes on files and templates.
-- Replace the anemic README.rdoc with this new and improved superpowered README.md :).
diff --git a/lc-gdn-chef/cookbooks/apache2/CHANGELOG.md b/lc-gdn-chef/cookbooks/apache2/CHANGELOG.md
deleted file mode 100644
index f383f6a3a669fd96540104fff23f6b81df57a7a6..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/CHANGELOG.md
+++ /dev/null
@@ -1,283 +0,0 @@
-# apache2 Cookbook Changelog
-
-This file is used to list changes made in each version of the apache2 cookbook.
-
-## 8.14.3 - *2022-04-20*
-
-Standardise files with files in sous-chefs/repo-management
-
-## 8.14.2 - *2022-02-03*
-
-- Remove delivery and switch to using reusable CI workflow
-- Update tested platforms
- - removed: CentOS 8, Debian 9
- - added: Rocky / Alma 8, Debian 11
-- Fix mod_php on Debian 11
-- Fedora fixes
- - mod-auth-cas
- - mod-wsgi
- - Drop support for mod_php
-
-## 8.14.1 - *2021-11-03*
-
-- Add CentOS Stream 8 to CI pipeline
-
-## 8.14.0 - *2021-08-31*
-
-- Add `envvars_additional_params` property to install resource
-
-## 8.13.1 - *2021-08-30*
-
-- Standardise files with files in sous-chefs/repo-management
-
-## 8.13.0 - *2021-07-09*
-
-- Add `default_charset`, `server_signature`, `server_tokens`, and `trace_enable` to `install` resource
-- Add `install_override` test suite
-
-## 8.12.0 - *2021-07-08*
-
-- Add `variables` property to `default_site` resource
-- Convert test suites `basic_site` and `mod_wsgi` to use updated `default_site` resource
-
-## 8.11.2 - *2021-07-06*
-
-- Fixed error with delivery where it was expecting string interpolation incorrectly
-- Fix EL8 welcome page
-
-## 8.11.1 - *2021-06-01*
-
-- Standardise files with files in sous-chefs/repo-management
-
-## 8.11.0 - *2021-05-06*
-
-- Add missing unified_mode to mod_php and mod_wsgi
-- Fix service resource restarting the service every run
-- Bump minimum Chef version to 15.3 to support unified_mode
-
-## 8.10.0 - *2021-04-09*
-
-- Fix `apache2_mod_auth_cas` resource for all supported platforms
-- Fix apache devel package name on SUSE platforms
-- Fix `libexec_dir` variable in `auth_cas.load` template
-- Add Integration tests for `apache2_mod_auth_cas` resource
-- Add docs for `apache2_mod_auth_cas`
-- Add `:source_checksum`, `:login_url`, `:validate_url`, `:directives` properties to `apache2_mod_auth_cas` resource
-- Allow `apache2_mod_auth_cas` resource to be nameless
-- Update `mod_auth_cas` source version to 1.2 and other various updates for source installations
-- Install `mod_auth_cas` by source on CentOS 8 and SUSE platforms (distro package is not currently available)
-- Include yum-epel recipe on RHEL/Amazon platforms
-
-## 8.9.1 - *2021-03-03*
-
-- Fix url in README
-
-## 8.9.0 - *2021-01-27*
-
-- Enable `options` property to pass arbitrary variables to the conf template
-
-## 8.8.0 - *2021-01-26*
-
-- Remove support and testing for Ubuntu 16.04
-
-## 8.7.0 - *2020-11-20*
-
-- Add `template_cookbook` property to `install`
-
-## 8.6.0 (2020-10-13)
-
-- Add `apache2_mod_wsgi` resource
-- Fix backwards compatibility for SUSE with `a2enmod`
-
-## 8.5.1 (2020-10-02)
-
-- Add apache namespace for `site_available?` and `site_enabled?` helper methods
-
-## 8.5.0 (2020-09-22)
-
-- resolved cookstyle error: spec/libraries/default_modules_spec.rb:8:7
- refactor: `ChefCorrectness/IncorrectLibraryInjection`
-- Cookstyle Bot Auto Corrections with Cookstyle 6.17.7
-- Directly include Apache2::Cookbook::Helpers in recipes and resources by default
-- `config` now has a `template_cookbook` property to use an external template
-
-## 8.4.0 (2020-09-09)
-
-- resolved cookstyle error: test/cookbooks/test/recipes/php.rb:1:1 refactor: `ChefCorrectness/IncorrectLibraryInjection`
-- Allow override of package name and version in `install` resource
-- Add tests for package name override
-
-## 8.3.0 (2020-07-13)
-
-- Add `mod_php` resource
-
-## 8.2.1 (2020-06-29)
-
-- Add missing lib_dir variable to `a2enmod` template
-
-## 8.2.0 (2020-06-18)
-
-- Updated helpers to use platform_family? when possible to simplify code
-- Fixed incorrect platform version comparison logic in the helpers
-- Add new platforms to the Kitchen configs
-- Remove logic in the Linux helpers that checked for systemd vs. non-systemd since we only support systemd platforms now
-
-## 8.1.2 - 2020-06-02
-
-- resolved cookstyle error: libraries/helpers.rb:196:14 refactor: `ChefCorrectness/InvalidPlatformFamilyInCase`
-- Enable unified_mode for all resources
- - This deprecates support for Chef Infra Client 14 and below
-
-## [8.1.1] - 2020-04-12
-
-- Add CentOS 8 to CI pipeline
-- Add Debian 10 / Remove Debian 8 from CI pipeline
-- Rename libexec_dir to apache_libexec_dir
-
-## [8.1.0] - 2020-03-06
-
-- Add 'template_cookbook' property to apache2_module
-- Migrated to Github Actions for testing
-
-### Fixed
-
-- Cookstyle fixes
-
-### Removed
-
-- Removed circleci testing
-
-## [8.0.2] - 2019-11-15
-
-- default_apache_root_group: replace with ohai root_group
-
-## [8.0.1] - 2019-11-15
-
-- Fix the service not being reloaded when ports.conf / apache2.conf change
-
-## [8.0.0] - 2019-11-13
-
-- Fix cache_dir permission so that modules can write in their cache_dir/module/ storage space
-- Latest Cookstyle changes in cookstyle 5.6.2
-- Fixed bug with freebsd and suse modules adding an array to an array
-- Fixed mod_ssl for suse
-- Fixed docroot paths for suse
-
-### Breaking Changes
-
-- Renamed `:cookbook` property for `apache2_default_site` resource
-
-### Added
-
-## [7.1.1] - 2019-08-07
-
-- Allow overwriting cookbook for apache2_mod templates using `template_cookbook` property.
-
-## [7.1.0] - 29-05-2019
-
-- Add upgrading examples in UPGRADING.md
-- Remove references to recipes in README.md and add a simple example
-- Allow users to set / alter the default module list
-- Allow users to alter the default modules configuration without re
-
-### Added
-
-- Uniform way to pass IPs in mod_info and mod_status
-
-## [7.0.0] - 05-03-2019
-
-- Remove all recipes
-- Use `declare_resource` in `apache2_module`
-- Add default value to `apache_2_mod_proxy`
-- Fix spelling of `default` in `access_file_name` property in `install.rb`
-
-## [6.0.0] - 25-02-2019
-
-See UPGRADING.md for upgrading.
-
-### v6 - Behaviour Changes
-
-- Default recipe now calls the install resource
-- Add helpers: for a full list see `libraries/helpers.rb`
-- Remove all `mpm_` recipes. Move mpm setup to the install resource
-- Allow user to set the mpm mode no matter what platform they're on
-- Remove FreeBSD, openSuse & Fedora Kitchen testing
-- `mod_` recipes now call `apache2_module_`
-- Mod templates are now more configurable when calling the resources directly
-- Add apache2_default_site resource
-- Remove and document apache2_webapp resource
-- Add the default_site resource for managing the default site
-- Add site resource
-- Remove the web_app resource as it was very prescriptive
-- Add mod_ssl
-
-### v6 - Testing/CI
-
-- Add CircleCI and remove Travis
-- Add CircleCI Orbs
-- Rename test cookbook name to test
-- Cleanup test integration directory
-- Specs added for most helpers
-- Make sysconfig parameters configurable via the install resource
-
-### v6 - Misc Updates & Improvements
-
-- Update README with new instructions on installing
-- Set the server to listen on ports 80 and 443 by default
-- Fix Options allowed in alias.conf
-- Add resource documentation to documentation directory
-
-## [5.2.1] - 04-09-2018
-
-- Revert ports.conf fix (the ports.conf that gets installed by the package conflicts with it).
-
-## [5.2.0] - 26-08-2018
-
-- Drop Chef 12 support
-- Add Danger and CircleCI support
-- Move apache binary detection to the helpers file
-- Add apache_platform_service_name, apache_dir, and apache_conf_dir helpers
-- Update kitchen configuration
-- Fix ports.conf location and how its set up (#550, skadz)
-- Allow httpd -t timeout to be configurable (#547, skadz)
-
-## [5.0.1] - 2017-09-01
-
-- Test using dokken-images in kitchen-dokken
-- Fix readme section for mod_php
-- Replace the existing testing.md contents with a link to the Chef testing docs
-- Fix mod_ldap failing on non-RHEL platforms
-- Fix mod_dav_svn to install the correct packages on Debian 8/9
-
-## [5.0.0] - 2017-07-13
-
-### Breaking changes
-
-- Support for Apache 2.2 has been fully removed so we can focus on providing a solid experience for Apache 2.4 and
- above. This removes support for RHEL 6, SLES 11, and Debian 7
-
-### Other changes
-
-- Fixed openSUSE support in multiple places and added integration testing for openSUSE in Travis
-
-## [4.0.0] - 2017-07-10
-
-- This cookbook now requires Chef 12.1 or later
-- Support for Apache 2.2 on FreeBSD has been removed
-- Support for Amazon Linux < 2013.09 has been removed
-- Support for end of life releases of Fedora (< 24), CentOS (5), Debian (6), Linux Mint (17), and Ubuntu (12.04) have
- been removed
-- Removed the deprecated recipes for mod_authz_default and mod_php5
-- Switched many package resources to Chef 12+ multipackage style to speed up Chef converges and reduce log clutter
-- mod_cache is now enabled when using mod_cache_disk and mod_cache_socache
-- The mod_cloudflare recipe now sets up the Apt repo with https
-- Improved support for Amazon Linux on Chef 13 and added Test Kitchen amazon testing
-- Improved support for Debian and RHEL derivative platforms
-- Improved Fedora support in multiple modules
-- Improved error logging for unsupported platforms in the mod_negotiation and mod_unixd recipes
-- Switched from Rake for testing to Delivery local mode
-- Setup integration testing with kitchen-dokken for core platforms in Travis so that every PR is now fully tested
-- Removed the EC2 and Docker kitchen files now that we have kitchen-dokken setup
-- Removed apt, pacman, yum, and zypper from the Berksfile as they're no longer needed for testing
-- Removed testing dependencies from the Gemfile as we're testing using ChefDK
-- Added integration testing for new Debian releases
-
-## Pre 4.0 Changelog
-
-For changelog entries pre4.0 please see [the pre-4.0 CHANGELOG](CHANGELOG-pre4.md).
diff --git a/lc-gdn-chef/cookbooks/apache2/LICENSE b/lc-gdn-chef/cookbooks/apache2/LICENSE
deleted file mode 100644
index 8dada3edaf50dbc082c9a125058f25def75e625a..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/LICENSE
+++ /dev/null
@@ -1,201 +0,0 @@
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "{}"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright {yyyy} {name of copyright owner}
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
diff --git a/lc-gdn-chef/cookbooks/apache2/README.md b/lc-gdn-chef/cookbooks/apache2/README.md
deleted file mode 100644
index bdf740b305a3d6e56c5a3d4c59a332f7abee822b..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/README.md
+++ /dev/null
@@ -1,140 +0,0 @@
-# apache2 Cookbook
-
-[apache2 on Chef Supermarket](https://supermarket.chef.io/cookbooks/apache2)
-[CI status](https://github.com/sous-chefs/apache2/actions?query=workflow%3Aci)
-[Backers](#backers)
-[Sponsors](#sponsors)
-[License: Apache-2.0](https://opensource.org/licenses/Apache-2.0)
-
-This cookbook provides a complete Debian/Ubuntu-style Apache HTTPD configuration. Non-Debian distributions supported by
-this cookbook, such as Red Hat/CentOS and Arch Linux, get a configuration that mimics the Debian/Ubuntu layout, as it is
-easier to manage with Chef.
-
-Debian-style Apache configuration uses scripts to manage modules and sites (vhosts). The scripts are:
-
-- a2ensite
-- a2dissite
-- a2enmod
-- a2dismod
-- a2enconf
-- a2disconf
-
-This cookbook ships with templates of these scripts for non-Debian based platforms.
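-
-For example, a minimal wrapper recipe (the module names here are purely illustrative) ends up driving these scripts on every platform, because the cookbook installs its own copies where the distribution does not provide them:
-
-```ruby
-# Illustrative wrapper recipe; module names are examples only.
-apache2_install 'default'    # installs httpd/apache2 and, on non-Debian platforms, the a2en*/a2dis* scripts
-apache2_module 'rewrite'     # enabled via a2enmod under the hood
-apache2_module 'autoindex' do
-  action :disable            # disabled via a2dismod
-end
-```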
-
-## Maintainers
-
-This cookbook is maintained by the Sous Chefs. The Sous Chefs are a community of Chef cookbook maintainers working
-together to maintain important cookbooks. If you’d like to know more please
-visit [sous-chefs.org](https://sous-chefs.org/) or come chat with us on the Chef Community Slack
-in [#sous-chefs](https://chefcommunity.slack.com/messages/C2V7B88SF).
-
-## Cookbooks
-
-Depending on your OS configuration and security policy, you may need additional recipes or cookbooks for this cookbook's
-recipes to converge on the node. In particular, the following Operating System settings may affect the behavior of this
-cookbook:
-
-- SELinux enabled
-- Firewalls (such as iptables, ufw, etc.)
-- Compile tools
-- 3rd party repositories
-
-On RHEL, SELinux is enabled by default. The [selinux](https://supermarket.chef.io/cookbooks/selinux) cookbook contains
-a `permissive` recipe that can be used to set SELinux to "Permissive" state. Otherwise, additional recipes need to be
-created by the user to address SELinux permissions.
-
-To deal with firewalls, Chef Software provides the [iptables](https://supermarket.chef.io/cookbooks/iptables)
-and [ufw](https://supermarket.chef.io/cookbooks/ufw) cookbooks, but is migrating from that approach to a more
-robust solution that uses the general [firewall](https://supermarket.chef.io/cookbooks/firewall) cookbook to set up
-rules. See those cookbooks' READMEs for documentation.
-
-On ArchLinux, if you are using the `apache2::mod_auth_openid` recipe, you also need
-the [pacman](https://supermarket.chef.io/cookbooks/pacman) cookbook for the `pacman_aur` LWRP. Put `recipe[pacman]` on
-the node's expanded run list (on the node or in a role). This is not an explicit dependency because it is only required
-for this single recipe and platform; the pacman default recipe performs `pacman -Sy` to keep pacman's package cache
-updated.
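-
-Pulling the notes above together, a wrapper recipe could (as a sketch, not something this cookbook does for you) relax SELinux and open the web ports before converging the resources here. The recipe and resources below come from the selinux and firewall cookbooks mentioned above, and the exact rule is only an example:
-
-```ruby
-# Sketch only: assumes the selinux and firewall cookbooks are dependencies of the wrapper cookbook.
-include_recipe 'selinux::permissive'
-
-firewall 'default' do
-  action :install
-end
-
-firewall_rule 'http/https' do
-  port [80, 443]
-  command :allow
-end
-```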
-
-## Platforms
-
-The following platforms and versions are tested and supported using [test-kitchen](http://kitchen.ci/)
-
-- Ubuntu 18.04 / 20.04
-- Debian 10 / 11
-- CentOS 7+ (incl. Rocky & Alma)
-- Fedora latest
-- OpenSUSE Leap
-
-### Notes for RHEL Family
-
-Apache 2.4 on CentOS 6 is not officially supported.
-
-## Usage
-
-It is recommended to create a project- or organization-specific
-[wrapper cookbook](https://blog.chef.io/doing-wrapper-cookbooks-right), use the desired custom resources in its recipes,
-and add that cookbook to a node's run list. Depending on your environment, you may have multiple roles that use
-different resources from this cookbook. Adjust any attributes as desired.
-
-```ruby
-# service['apache2'] is defined by the apache2_install resource, but other resources are
-# currently unable to reference it. To work around this, declare the service in your
-# wrapper cookbook as well:
-service 'apache2' do
- service_name lazy { apache_platform_service_name }
- supports restart: true, status: true, reload: true
- action :nothing
-end
-
-apache2_install 'default_install'
-apache2_module 'headers'
-apache2_module 'ssl'
-
-apache2_default_site 'foo' do
- default_site_name 'my_site'
- template_cookbook 'my_cookbook'
- port '443'
- template_source 'my_site.conf.erb'
- action :enable
-end
-```
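-
-Because the service is declared in the wrapper, any other resource there can notify it. A minimal, hypothetical example:
-
-```ruby
-# Hypothetical vhost template in the wrapper cookbook, reloading the service declared above.
-template '/etc/apache2/sites-available/my_site.conf' do
-  source 'my_site.conf.erb'
-  notifies :reload, 'service[apache2]', :delayed
-end
-```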
-
-Example wrapper cookbooks:
-
-- [basic site](https://github.com/sous-chefs/apache2/blob/master/test/cookbooks/test/recipes/basic_site.rb)
-- [ssl site](https://github.com/sous-chefs/apache2/blob/master/test/cookbooks/test/recipes/mod_ssl.rb)
-
-## Resources
-
-- [install](https://github.com/sous-chefs/apache2/blob/master/documentation/resource_apache2_install.md)
-- [default_site](https://github.com/sous-chefs/apache2/blob/master/documentation/resource_apache2_default_site.md)
-- [site](https://github.com/sous-chefs/apache2/blob/master/documentation/resource_apache2_site.md)
-- [conf](https://github.com/sous-chefs/apache2/blob/master/documentation/resource_apache2_conf.md)
-- [config](https://github.com/sous-chefs/apache2/blob/master/documentation/resource_apache2_config.md)
-- [mod](https://github.com/sous-chefs/apache2/blob/master/documentation/resource_apache2_mod.md)
-- [module](https://github.com/sous-chefs/apache2/blob/master/documentation/resource_apache2_module.md)
-- [mod_php](https://github.com/sous-chefs/apache2/blob/master/documentation/resource_apache2_mod_php.md)
-- [mod_wsgi](https://github.com/sous-chefs/apache2/blob/master/documentation/resource_apache2_mod_wsgi.md)
-- [mod_auth_cas](https://github.com/sous-chefs/apache2/blob/master/documentation/resource_apache2_mod_auth_cas.md)
-
-## Contributors
-
-This project exists thanks to all the people
-who [contribute](https://opencollective.com/sous-chefs/contributors.svg?width=890&button=false).
-
-### Backers
-
-Thank you to all our backers!
-
-
-
-### Sponsors
-
-Support this project by becoming a sponsor. Your logo will show up here with a link to your website.
-
diff --git a/lc-gdn-chef/cookbooks/apache2/chefignore b/lc-gdn-chef/cookbooks/apache2/chefignore
deleted file mode 100644
index a27b0b258ce499b1c554591535a85bcdb9292236..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/chefignore
+++ /dev/null
@@ -1,115 +0,0 @@
-# Put files/directories that should be ignored in this file when uploading
-# to a Chef Infra Server or Supermarket.
-# Lines that start with '# ' are comments.
-
-# OS generated files #
-######################
-.DS_Store
-ehthumbs.db
-Icon?
-nohup.out
-Thumbs.db
-.envrc
-
-# EDITORS #
-###########
-.#*
-.project
-.settings
-*_flymake
-*_flymake.*
-*.bak
-*.sw[a-z]
-*.tmproj
-*~
-\#*
-REVISION
-TAGS*
-tmtags
-.vscode
-.editorconfig
-
-## COMPILED ##
-##############
-*.class
-*.com
-*.dll
-*.exe
-*.o
-*.pyc
-*.so
-*/rdoc/
-a.out
-mkmf.log
-
-# Testing #
-###########
-.circleci/*
-.codeclimate.yml
-.delivery/*
-.foodcritic
-.kitchen*
-.mdlrc
-.overcommit.yml
-.rspec
-.rubocop.yml
-.travis.yml
-.watchr
-.yamllint
-azure-pipelines.yml
-Dangerfile
-examples/*
-features/*
-Guardfile
-kitchen*.yml
-mlc_config.json
-Procfile
-Rakefile
-spec/*
-test/*
-
-# SCM #
-#######
-.git
-.gitattributes
-.gitconfig
-.github/*
-.gitignore
-.gitkeep
-.gitmodules
-.svn
-*/.bzr/*
-*/.git
-*/.hg/*
-*/.svn/*
-
-# Berkshelf #
-#############
-Berksfile
-Berksfile.lock
-cookbooks/*
-tmp
-
-# Bundler #
-###########
-vendor/*
-Gemfile
-Gemfile.lock
-
-# Policyfile #
-##############
-Policyfile.rb
-Policyfile.lock.json
-
-# Documentation #
-#############
-CODE_OF_CONDUCT*
-CONTRIBUTING*
-documentation/*
-TESTING*
-UPGRADING*
-
-# Vagrant #
-###########
-.vagrant
-Vagrantfile
diff --git a/lc-gdn-chef/cookbooks/apache2/files/apache2_module_conf_generate.pl b/lc-gdn-chef/cookbooks/apache2/files/apache2_module_conf_generate.pl
deleted file mode 100644
index 0aef14151f6ebe748658e4e778d94ddb00a4d2ce..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/files/apache2_module_conf_generate.pl
+++ /dev/null
@@ -1,40 +0,0 @@
-#!/usr/bin/perl
-
-=begin
-
-Generates Ubuntu style module.load files.
-
-./apache2_module_conf_generate.pl /usr/lib64/httpd/modules /etc/httpd/mods-available
-
-ARGV[0] is the apache modules directory, ARGV[1] is where you want them.
-
-=cut
-
-use File::Find;
-
-use strict;
-use warnings;
-
-die "Must have '/path/to/modules' and '/path/to/modules.load'"
- unless $ARGV[0] && $ARGV[1];
-
-find(
- {
- wanted => sub {
- return 1 if $File::Find::name !~ /\.so$/;
- my $modfile = $_;
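- # Derive the module name from a lib<name>.so or mod_<name>.so filename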
- $modfile =~ /(lib|mod_)(.+)\.so$/;
- my $modname = $2;
- my $filename = "$ARGV[1]/$modname.load";
- unless ( -f $filename ) {
- open( FILE, ">", $filename ) or die "Cannot open $filename";
- print FILE "LoadModule " . $modname . "_module $File::Find::name\n";
- close(FILE);
- }
- },
- follow => 1,
- },
- $ARGV[0]
-);
-
-exit 0;
diff --git a/lc-gdn-chef/cookbooks/apache2/libraries/helpers.rb b/lc-gdn-chef/cookbooks/apache2/libraries/helpers.rb
deleted file mode 100644
index 8e7bedf52471d91e718efd7bf58dbfa25a214503..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/libraries/helpers.rb
+++ /dev/null
@@ -1,500 +0,0 @@
-module Apache2
- module Cookbook
- module Helpers
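- # Absolute path to the Apache binary for this platform family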
- def apache_binary
- case node['platform_family']
- when 'debian'
- '/usr/sbin/apache2'
- when 'freebsd'
- '/usr/local/sbin/httpd'
- else
- '/usr/sbin/httpd'
- end
- end
-
- def apache_platform_service_name
- case node['platform_family']
- when 'debian', 'suse'
- 'apache2'
- when 'freebsd'
- 'apache24'
- else
- 'httpd'
- end
- end
-
- def default_types_config
- case node['platform_family']
- when 'arch'
- "#{apache_dir}/conf/mime.types"
- when 'freebsd'
- "#{apache_dir}/mime.types"
- else
- '/etc/mime.types'
- end
- end
-
- def apachectl
- case node['platform_family']
- when 'debian', 'suse'
- '/usr/sbin/apache2ctl'
- when 'freebsd'
- '/usr/local/sbin/apachectl'
- else
- '/usr/sbin/apachectl'
- end
- end
-
- def apache_dir
- case node['platform_family']
- when 'debian', 'suse'
- '/etc/apache2'
- when 'freebsd'
- '/usr/local/etc/apache24'
- else
- '/etc/httpd'
- end
- end
-
- def lib_dir
- arch = node['kernel']['machine']
-
- case node['platform_family']
- when 'rhel', 'amazon', 'fedora'
- if arch =~ /64/ || %w(armv8l s390x).include?(arch)
- '/usr/lib64/httpd'
- else
- '/usr/lib/httpd'
- end
- when 'suse'
- if arch =~ /64/ || %w(armv8l s390x).include?(arch)
- '/usr/lib64/apache2'
- else
- '/usr/lib/apache2'
- end
- when 'freebsd'
- '/usr/local/libexec/apache24'
- when 'arch'
- '/usr/lib/httpd'
- else
- '/usr/lib/apache2'
- end
- end
-
- def apache_libexec_dir
- if platform_family?('freebsd', 'suse')
- lib_dir
- else
- File.join(lib_dir, 'modules')
- end
- end
-
- def apache_conf_dir
- case node['platform_family']
- when 'debian', 'suse'
- '/etc/apache2'
- when 'freebsd'
- '/usr/local/etc/apache24'
- else
- '/etc/httpd/conf'
- end
- end
-
- def icon_dir
- case node['platform_family']
- when 'debian', 'suse'
- '/usr/share/apache2/icons'
- when 'freebsd'
- '/usr/local/www/apache24/icons'
- else
- '/usr/share/httpd/icons'
- end
- end
-
- def perl_pkg
- platform_family?('freebsd') ? 'perl5' : 'perl'
- end
-
- def default_apache_pkg
- case node['platform_family']
- when 'amazon'
- 'httpd'
- when 'rhel'
- 'httpd'
- when 'debian', 'suse'
- 'apache2'
- when 'arch'
- 'apache'
- when 'freebsd'
- 'apache24'
- else
- 'httpd'
- end
- end
-
- def default_log_dir
- case node['platform_family']
- when 'debian', 'suse'
- '/var/log/apache2'
- when 'freebsd'
- '/var/log'
- else
- '/var/log/httpd'
- end
- end
-
- def cache_dir
- case node['platform_family']
- when 'debian', 'suse'
- '/var/cache/apache2'
- when 'freebsd'
- '/var/cache/apache24'
- else
- '/var/cache/httpd'
- end
- end
-
- def default_cache_root
- if platform_family?('debian', 'suse', 'freebsd')
- ::File.join(cache_dir, 'proxy')
- else
- ::File.join(cache_dir, 'mod_cache_disk')
- end
- end
-
- def lock_dir
- case node['platform_family']
- when 'debian'
- '/var/lock/apache2'
- when 'freebsd'
- '/var/run'
- else
- '/var/run/httpd'
- end
- end
-
- def default_docroot_dir
- case node['platform_family']
- when 'arch'
- '/srv/http'
- when 'freebsd'
- '/usr/local/www/apache24/data'
- when 'suse'
- '/srv/www/htdocs'
- else
- '/var/www/html'
- end
- end
-
- def default_cgibin_dir
- case node['platform_family']
- when 'debian'
- '/usr/lib/cgi-bin'
- when 'arch'
- '/usr/share/httpd/cgi-bin'
- when 'freebsd'
- '/usr/local/www/apache24/cgi-bin'
- else
- '/var/www/cgi-bin'
- end
- end
-
- def default_run_dir
- case node['platform_family']
- when 'debian'
- '/var/run/apache2'
- when 'freebsd'
- '/var/run'
- else
- '/var/run/httpd'
- end
- end
-
- def default_apache_user
- case node['platform_family']
- when 'suse'
- 'wwwrun'
- when 'debian'
- 'www-data'
- when 'arch'
- 'http'
- when 'freebsd'
- 'www'
- else
- 'apache'
- end
- end
-
- def default_apache_group
- case node['platform_family']
- when 'suse', 'freebsd'
- 'www'
- when 'debian'
- 'www-data'
- when 'arch'
- 'http'
- else
- 'apache'
- end
- end
-
- def default_modules
- default_modules = %w(status alias auth_basic authn_core authn_file authz_core authz_groupfile
- authz_host authz_user autoindex deflate dir env mime negotiation setenvif)
-
- case node['platform_family']
- when 'rhel', 'fedora', 'amazon'
- default_modules.concat %w(log_config logio unixd systemd)
- when 'arch', 'freebsd'
- default_modules.concat %w(log_config logio unixd)
- when 'suse'
- default_modules.concat %w(log_config logio)
- else
- default_modules
- end
- end
-
- def default_mpm
- case node['platform']
- when 'debian'
- 'worker'
- when 'linuxmint', 'ubuntu'
- 'event'
- else
- 'prefork'
- end
- end
-
- def default_error_log
- platform_family?('freebsd') ? 'httpd-error.log' : 'error.log'
- end
-
- def default_access_log
- platform_family?('freebsd') ? 'httpd-access.log' : 'access.log'
- end
-
- def default_mime_magic_file
- ::File.join(apache_dir, 'magic')
- end
-
- def apache_pid_file
- case node['platform_family']
- when 'suse'
- '/var/run/httpd2.pid'
- when 'debian'
- '/var/run/apache2/apache2.pid'
- when 'freebsd'
- '/var/run/httpd.pid'
- else
- '/var/run/httpd/httpd.pid'
- end
- end
-
- def conf_enabled?(new_resource)
- ::File.symlink?("#{apache_dir}/conf-enabled/#{new_resource.name}.conf")
- end
-
- def mod_enabled?(new_resource)
- ::File.symlink?("#{apache_dir}/mods-enabled/#{new_resource.name}.load")
- end
-
- def apache_site_enabled?(site_name)
- ::File.symlink?("#{apache_dir}/sites-enabled/#{site_name}.conf")
- end
-
- def apache_site_available?(site_name)
- ::File.exist?("#{apache_dir}/sites-available/#{site_name}.conf")
- end
-
- def apache_devel_package(mpm)
- case node['platform_family']
- when 'amazon'
- if node['platform_version'].to_i == 2
- 'httpd-devel'
- else
- 'httpd24-devel'
- end
- when 'debian'
- if mpm == 'prefork'
- 'apache2-prefork-dev'
- else
- 'apache2-dev'
- end
- when 'suse'
- 'apache2-devel'
- else
- 'httpd-devel'
- end
- end
-
- def default_pass_phrase_dialog
- platform?('ubuntu') ? 'exec:/usr/share/apache2/ask-for-passphrase' : 'builtin'
- end
-
- def default_session_cache
- case node['platform_family']
- when 'freebsd'
- 'shmcb:/var/run/ssl_scache(512000)'
- when 'rhel', 'fedora', 'suse', 'amazon'
- 'shmcb:/var/cache/mod_ssl/scache(512000)'
- else
- 'shmcb:/var/run/apache2/ssl_scache'
- end
- end
-
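- # Modules for which the cookbook ships a mods-available/<name>.conf template alongside the .load file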
- def config_file?(mod_name)
- if %w(ldap
- actions
- alias
- auth_cas
- autoindex
- cache_disk
- cgid
- dav_fs
- deflate
- dir
- fastcgi
- fcgid
- include
- info
- ldap
- mime_magic
- mime
- negotiation
- pagespeed
- proxy_balancer
- proxy_ftp
- proxy
- reqtimeout
- setenvif
- ssl
- status
- userdir
- mpm_event
- mpm_prefork
- mpm_worker
- ).include?(mod_name)
- true
- else
- false
- end
- end
-
- def pagespeed_url
- suffix = platform_family?('rhel', 'fedora', 'amazon') ? 'rpm' : 'deb'
-
- if node['kernel']['machine'] =~ /^i[36]86$/
- "https://dl-ssl.google.com/dl/linux/direct/mod-pagespeed-stable_current_i386.#{suffix}"
- else
- "https://dl-ssl.google.com/dl/linux/direct/mod-pagespeed-stable_current_amd64.#{suffix}"
- end
- end
-
- def default_site_template_source
- platform_family?('debian') ? "#{default_site_name}.conf.erb" : 'welcome.conf.erb'
- end
-
- # mod_php
- def apache_mod_php_package
- case node['platform_family']
- when 'debian'
- 'libapache2-mod-php'
- when 'rhel', 'amazon'
- 'mod_php'
- when 'suse'
- 'apache2-mod_php7'
- end
- end
-
- def apache_mod_php_modulename
- case node['platform_family']
- when 'amazon'
- 'php5_module'
- when 'rhel'
- if node['platform_version'].to_i >= 8
- 'php7_module'
- else
- 'php5_module'
- end
- else
- 'php7_module'
- end
- end
-
- def apache_mod_php_filename
- case node['platform_family']
- when 'debian'
- if platform?('debian') && node['platform_version'].to_i == 10
- 'libphp7.3.so'
- elsif platform?('debian') && node['platform_version'].to_i >= 11
- 'libphp7.4.so'
- elsif platform?('ubuntu') && node['platform_version'].to_f == 18.04
- 'libphp7.2.so'
- elsif platform?('ubuntu') && node['platform_version'].to_f >= 20.04
- 'libphp7.4.so'
- else
- 'libphp7.0.so'
- end
- when 'rhel'
- if node['platform_version'].to_i >= 8
- 'libphp7.so'
- else
- 'libphp5.so'
- end
- when 'amazon'
- 'libphp5.so'
- when 'suse'
- 'mod_php7.so'
- end
- end
-
- # mod_wsgi
- def apache_mod_wsgi_package
- case node['platform_family']
- when 'debian'
- 'libapache2-mod-wsgi-py3'
- when 'amazon'
- 'mod_wsgi'
- when 'rhel', 'fedora'
- if node['platform_version'].to_i >= 8
- 'python3-mod_wsgi'
- else
- 'mod_wsgi'
- end
- when 'suse'
- 'apache2-mod_wsgi-python3'
- end
- end
-
- def apache_mod_wsgi_filename
- if platform_family?('rhel', 'fedora') && node['platform_version'].to_i >= 8
- 'mod_wsgi_python3.so'
- else
- 'mod_wsgi.so'
- end
- end
-
- def apache_mod_auth_cas_install_method
- if (platform_family?('rhel') && node['platform_version'].to_i >= 8) \
- || platform_family?('suse') || platform_family?('fedora')
- 'source'
- else
- 'package'
- end
- end
-
- def apache_mod_auth_cas_devel_packages
- if platform_family?('rhel', 'amazon', 'fedora')
- %w(openssl-devel libcurl-devel pcre-devel libtool)
- elsif platform_family?('debian')
- %w(libssl-dev libcurl4-openssl-dev libpcre++-dev libtool)
- elsif platform_family?('suse')
- %w(libopenssl-devel libcurl-devel pcre-devel libtool)
- end
- end
- end
- end
-end
-
-Chef::DSL::Recipe.include Apache2::Cookbook::Helpers
-Chef::Resource.include Apache2::Cookbook::Helpers
diff --git a/lc-gdn-chef/cookbooks/apache2/metadata.json b/lc-gdn-chef/cookbooks/apache2/metadata.json
deleted file mode 100644
index 454f08f4c8c35cbe306eed46c968e1e5a81e5cd1..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/metadata.json
+++ /dev/null
@@ -1,43 +0,0 @@
-{
- "name": "apache2",
- "description": "Installs and configures apache2",
- "long_description": "",
- "maintainer": "Sous Chefs",
- "maintainer_email": "help@sous-chefs.org",
- "license": "Apache-2.0",
- "platforms": {
- "debian": ">= 0.0.0",
- "ubuntu": ">= 0.0.0",
- "redhat": ">= 0.0.0",
- "centos": ">= 0.0.0",
- "fedora": ">= 0.0.0",
- "amazon": ">= 0.0.0",
- "scientific": ">= 0.0.0",
- "freebsd": ">= 0.0.0",
- "suse": ">= 0.0.0",
- "opensuse": ">= 0.0.0",
- "opensuseleap": ">= 0.0.0",
- "arch": ">= 0.0.0"
- },
- "dependencies": {
- "yum-epel": ">= 0.0.0"
- },
- "providing": {
- },
- "recipes": {
- },
- "version": "8.14.3",
- "source_url": "https://github.com/sous-chefs/apache2",
- "issues_url": "https://github.com/sous-chefs/apache2/issues",
- "privacy": false,
- "chef_versions": [
- [
- ">= 15.3"
- ]
- ],
- "ohai_versions": [
- ],
- "gems": [
- ],
- "eager_load_libraries": true
-}
diff --git a/lc-gdn-chef/cookbooks/apache2/metadata.rb b/lc-gdn-chef/cookbooks/apache2/metadata.rb
deleted file mode 100644
index 938144c952d18eb7db92a74e144c858ecd3348a6..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/metadata.rb
+++ /dev/null
@@ -1,24 +0,0 @@
-name 'apache2'
-source_url 'https://github.com/sous-chefs/apache2'
-issues_url 'https://github.com/sous-chefs/apache2/issues'
-maintainer 'Sous Chefs'
-maintainer_email 'help@sous-chefs.org'
-chef_version '>= 15.3'
-license 'Apache-2.0'
-description 'Installs and configures apache2'
-version '8.14.3'
-
-depends 'yum-epel'
-
-supports 'debian'
-supports 'ubuntu'
-supports 'redhat'
-supports 'centos'
-supports 'fedora'
-supports 'amazon'
-supports 'scientific'
-supports 'freebsd'
-supports 'suse'
-supports 'opensuse'
-supports 'opensuseleap'
-supports 'arch'
diff --git a/lc-gdn-chef/cookbooks/apache2/resources/conf.rb b/lc-gdn-chef/cookbooks/apache2/resources/conf.rb
deleted file mode 100644
index 507278760c89a56d550134af96add34f58bc4eaa..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/resources/conf.rb
+++ /dev/null
@@ -1,47 +0,0 @@
-unified_mode true
-
-property :path, String,
- default: lazy { "#{apache_dir}/conf-available" },
- description: 'Path to the conf-available directory'
-
-property :root_group, String,
- default: lazy { node['root_group'] },
- description: 'Group that the root user on the box runs as. Defaults to platform specific locations, see libraries/helpers.rb'
-
-property :template_cookbook, String,
- default: 'apache2',
- description: 'Cookbook to source the template from. Override this to provide your own template'
-
-property :options, Hash,
- default: {
- server_tokens: 'Prod',
- server_signature: 'On',
- trace_enable: 'Off',
- },
- description: 'A hash to pass to the template'
-
-action :enable do
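- # Render the conf file into conf-available, then let a2enconf link it into conf-enabled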
- template ::File.join(new_resource.path, "#{new_resource.name}.conf") do
- cookbook new_resource.template_cookbook
- owner 'root'
- group new_resource.root_group
- backup false
- mode '0644'
- variables new_resource.options.merge({ apache_dir: apache_dir })
- notifies :restart, 'service[apache2]', :delayed
- end
-
- execute "a2enconf #{new_resource.name}" do
- command "/usr/sbin/a2enconf #{new_resource.name}"
- notifies :restart, 'service[apache2]', :delayed
- not_if { conf_enabled?(new_resource) }
- end
-end
-
-action :disable do
- execute "a2disconf #{new_resource.name}" do
- command "/usr/sbin/a2disconf #{new_resource.name}"
- notifies :reload, 'service[apache2]', :delayed
- only_if { conf_enabled?(new_resource) }
- end
-end
diff --git a/lc-gdn-chef/cookbooks/apache2/resources/config.rb b/lc-gdn-chef/cookbooks/apache2/resources/config.rb
deleted file mode 100644
index 4944e85cd3b5ba0cc99319338124303856e9094d..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/resources/config.rb
+++ /dev/null
@@ -1,106 +0,0 @@
-unified_mode true
-
-property :root_group, String,
- default: lazy { node['root_group'] },
- description: 'Group that the root user on the box runs as. Defaults to platform specific locations, see libraries/helpers.rb'
-
-property :access_file_name, String,
- default: '.htaccess',
- description: 'String: Access filename'
-
-property :log_dir, String,
- default: lazy { default_log_dir },
- description: 'Log directory location. Defaults to platform specific locations, see libraries/helpers.rb'
-
-property :error_log, String,
- default: lazy { default_error_log },
- description: 'Error log location. Defaults to platform specific locations, see libraries/helpers.rb'
-
-property :log_level, String,
- default: 'warn',
- description: 'log level for apache2'
-
-property :apache_user, String,
- default: lazy { default_apache_user },
- description: 'Set to override the default apache2 user. Defaults to platform specific locations, see libraries/helpers.rb'
-
-property :apache_group, String,
- default: lazy { default_apache_group },
- description: 'Set to override the default apache2 group. Defaults to platform specific locations, see libraries/helpers.rb'
-
-property :keep_alive, String,
- equal_to: %w(On Off),
- default: 'On',
- description: 'The persistent connection feature of HTTP/1.1 provides long-lived HTTP sessions'
-
-property :max_keep_alive_requests, Integer,
- default: 100,
- description: 'MaxKeepAliveRequests'
-
-property :keep_alive_timeout, Integer,
- default: 5,
- description: 'KeepAliveTimeout'
-
-property :docroot_dir, String,
- default: lazy { default_docroot_dir },
- description: 'Apache document root.'\
-'Defaults to platform specific locations, see libraries/helpers.rb'
-
-property :timeout, [Integer, String],
- coerce: proc { |m| m.is_a?(Integer) ? m.to_s : m },
- default: 300,
- description: 'The number of seconds before receive and send operations time out'
-
-property :server_name, String,
- default: 'localhost',
- description: 'Sets the ServerName directive'
-
-property :run_dir, String,
- default: lazy { default_run_dir },
- description: ' Sets the DefaultRuntimeDir directive.'\
-'Defaults to platform specific locations, see libraries/helpers.rb'
-
-property :template_cookbook, String,
- default: 'apache2',
- description: 'Cookbook containing the template file'
-
-action :create do
- template 'apache2.conf' do
- if platform_family?('debian')
- path "#{apache_conf_dir}/apache2.conf"
- else
- path "#{apache_conf_dir}/httpd.conf"
- end
- action :create
- source 'apache2.conf.erb'
- cookbook new_resource.template_cookbook
- owner 'root'
- group new_resource.root_group
- mode '0640'
- variables(
- access_file_name: new_resource.access_file_name,
- apache_binary: apache_binary,
- apache_dir: apache_dir,
- apache_user: new_resource.apache_user,
- apache_group: new_resource.apache_group,
- docroot_dir: new_resource.docroot_dir,
- error_log: new_resource.error_log,
- keep_alive: new_resource.keep_alive,
- keep_alive_timeout: new_resource.keep_alive_timeout,
- lock_dir: lock_dir,
- log_dir: new_resource.log_dir,
- log_level: new_resource.log_level,
- max_keep_alive_requests: new_resource.max_keep_alive_requests,
- pid_file: apache_pid_file,
- run_dir: new_resource.run_dir,
- timeout: new_resource.timeout,
- server_name: new_resource.server_name
- )
- notifies :enable, 'service[apache2]', :delayed
- notifies :restart, 'service[apache2]', :delayed
- end
-end
-
-action_class do
- include Apache2::Cookbook::Helpers
-end
diff --git a/lc-gdn-chef/cookbooks/apache2/resources/default_site.rb b/lc-gdn-chef/cookbooks/apache2/resources/default_site.rb
deleted file mode 100644
index 674be9e9195b688cb38c5245c92804be3f3c9659..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/resources/default_site.rb
+++ /dev/null
@@ -1,95 +0,0 @@
-unified_mode true
-
-property :default_site_name, String,
- default: 'default-site',
- description: 'The default site name'
-
-property :site_action, [String, Symbol],
- default: :enable,
- coerce: proc { |m| m.is_a?(String) ? m.to_sym : m },
- equal_to: %i( enable disable),
- description: 'Enable the site. Allows you to place all the configuration on disk but not enable the site'
-
-property :port, String,
- default: '80',
- description: 'Listen port'
-
-property :template_cookbook, String,
- default: 'apache2',
- description: 'Cookbook to source the template file from'
-
-property :server_admin, String,
- default: 'root@localhost',
- description: 'Default site contact name'
-
-property :log_level, String,
- default: 'warn',
- description: 'Log level for apache2'
-
-property :log_dir, String,
- default: lazy { default_log_dir },
- description: 'Default Apache2 log directory'
-
-property :docroot_dir, String,
- default: lazy { default_docroot_dir },
- description: 'Apache document root.'\
-'Defaults to platform specific locations, see libraries/helpers.rb'
-
-property :apache_root_group, String,
- default: lazy { node['root_group'] },
- description: 'Group that the root user on the box runs as.'\
-'Defaults to platform specific locations, see libraries/helpers.rb'
-
-property :template_source, String,
- default: lazy { default_site_template_source },
- description: 'Source for the template.'
-
-property :variables, Hash,
- default: {},
- description: 'A hash to pass to the template'
-
-action :enable do
- template "#{new_resource.default_site_name}.conf" do
- source new_resource.template_source
- path "#{apache_dir}/sites-available/#{new_resource.default_site_name}.conf"
- owner 'root'
- group new_resource.apache_root_group
- mode '0644'
- cookbook new_resource.template_cookbook
- variables new_resource.variables.merge({
- access_log: default_access_log,
- cgibin_dir: default_cgibin_dir,
- docroot_dir: new_resource.docroot_dir,
- error_log: default_error_log,
- log_dir: default_log_dir,
- log_level: new_resource.log_level,
- port: new_resource.port,
- server_admin: new_resource.server_admin,
- site_name: new_resource.default_site_name,
- })
- end
-
- apache2_site new_resource.default_site_name do
- action new_resource.site_action
- end
-end
-
-action :disable do
- template "#{new_resource.default_site_name}.conf" do
- path "#{apache_dir}/sites-available/#{new_resource.default_site_name}.conf"
- source 'default-site.conf.erb'
- owner 'root'
- group new_resource.apache_root_group
- mode '0644'
- cookbook new_resource.template_cookbook
- action :delete
- end
-
- apache2_site new_resource.default_site_name do
- action :disable
- end
-end
-
-action_class do
- include Apache2::Cookbook::Helpers
-end
diff --git a/lc-gdn-chef/cookbooks/apache2/resources/install.rb b/lc-gdn-chef/cookbooks/apache2/resources/install.rb
deleted file mode 100644
index 259c8e144ef1617ba53a4702f42ff984621a874a..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/resources/install.rb
+++ /dev/null
@@ -1,406 +0,0 @@
-unified_mode true
-
-property :apache_pkg, String,
- default: lazy { default_apache_pkg },
- description: 'Name of the apache package to install.
-Defaults to platform specific names, see libraries/helpers.rb'
-
-property :apache_version, String,
- default: '',
- description: 'Version of the apache package to install.
-Defaults to the newest available.'
-
-property :root_group, String,
- default: lazy { node['root_group'] },
- description: 'Group that the root user on the box runs as.
-Defaults to platform specific locations, see libraries/helpers.rb'
-
-property :apache_user, String,
- default: lazy { default_apache_user },
- description: 'Set to override the default apache2 user.'\
-'Defaults to platform specific locations, see libraries/helpers.rb'
-
-property :apache_group, String,
- default: lazy { default_apache_group },
- description: 'Set to override the default apache2 group.'\
-'Defaults to platform specific locations, see libraries/helpers.rb'
-
-property :log_dir, String,
- default: lazy { default_log_dir },
- description: 'Log directory location.'\
-'Defaults to platform specific locations, see libraries/helpers.rb'
-
-property :error_log, String,
- default: lazy { default_error_log },
- description: 'Error log location.'\
-'Defaults to platform specific locations, see libraries/helpers.rb'
-
-property :log_level, String,
- default: 'warn',
- description: 'Log level for apache2'
-
-property :apache_locale, String,
- default: 'system',
- description: 'Locale for apache2, defaults to the system locale'
-
-property :status_url, String,
- default: 'http://localhost:80/server-status',
- description: 'URL for status checks'
-
-property :server_name, String,
- default: 'localhost',
- description: 'Sets the ServerName directive'
-
-property :server_signature, String,
- equal_to: %w(On Off EMail),
- default: 'On',
- description: 'Sets the ServerSignature directive'
-
-property :server_tokens, String,
- equal_to: %w(Major Minor Min Minimal Prod ProductOnly OS Full),
- default: 'Prod',
- description: 'Sets the ServerTokens directive'
-
-property :trace_enable, String,
- equal_to: %w(On Off extended),
- default: 'Off',
- description: 'Sets the TraceEnable directive'
-
-property :default_charset, [String, Array],
- coerce: proc { |p| p.is_a?(Array) ? p : Array(p) },
- description: 'Sets the AddDefaultCharset directive for each element provided'
-
-property :httpd_t_timeout, Integer,
- default: 10,
- description: 'Service timeout setting. Defaults to 10 seconds'
-
-property :mpm, String,
- default: lazy { default_mpm },
- description: 'Multi-processing Module.'\
-'Defaults to platform specific locations, see libraries/helpers.rb'
-
-property :mpm_conf, Hash,
- default: {},
- description: 'Multi-processing Module configuration options.'
-
-property :modules, [String, Array],
- default: lazy { default_modules },
- description: 'List of default modules activated.'
-
-property :mod_conf, Hash,
- default: {},
- description: 'Optional configuration for other default modules, passed as a Hash of Hashes keyed by module name.'
-
-property :listen, [String, Array],
- default: %w(80 443),
- coerce: proc { |p| p.is_a?(Array) ? p : Array(p) },
- description: 'Port to listen on. Defaults to both 80 & 443'
-
-property :keep_alive, String,
- equal_to: %w(On Off),
- default: 'On',
- description: 'The persistent connection feature of HTTP/1.1 provides long-lived HTTP sessions'
-
-property :max_keep_alive_requests, Integer,
- default: 100,
- description: 'MaxKeepAliveRequests'
-
-property :keep_alive_timeout, Integer,
- default: 5,
- description: 'KeepAliveTimeout'
-
-property :docroot_dir, String,
- default: lazy { default_docroot_dir },
- description: 'Apache document root.'\
-'Defaults to platform specific locations, see libraries/helpers.rb'
-
-property :run_dir, String,
- default: lazy { default_run_dir },
- description: 'Location for APACHE_RUN_DIR.'\
-'Defaults to platform specific locations, see libraries/helpers.rb'
-
-property :access_file_name, String,
- default: '.htaccess',
- description: 'Access filename'
-
-property :timeout, [Integer, String],
- coerce: proc { |m| m.is_a?(Integer) ? m.to_s : m },
- default: 300,
- description: 'The number of seconds before receive and send operations time out'
-
-property :envvars_additional_params, Hash,
- description: 'Hash of additional environment variables to add to the envvars file'
-
-property :sysconfig_additional_params, Hash,
- description: 'Hash of additional sysconfig parameters to apply to the system'
-
-property :template_cookbook, String,
- default: 'apache2',
- description: 'Cookbook to source the template from. Override this to provide your own template'
-
-action :install do
- package [new_resource.apache_pkg, perl_pkg] do
- version [new_resource.apache_version, nil] unless new_resource.apache_version.empty?
- end
- # Disabling for now as we don't have Fedora support (right now)
- # package 'perl-Getopt-Long-Descriptive' if platform?('fedora')
-
- # Setup the config directories as per Debian for easier adding and removing config
- # If we didn't do this, we would need to use the line cookbook to remove and add config from
- # a single file.
- directory apache_dir do
- mode '0750'
- owner 'root'
- group new_resource.root_group
- end
-
- %w(sites-available sites-enabled mods-available mods-enabled conf-available conf-enabled).each do |dir|
- directory "#{apache_dir}/#{dir}" do
- mode '0750'
- owner 'root'
- group new_resource.root_group
- end
- end
-
- %w( conf.d conf.modules.d ).each do |dir|
- directory "#{apache_dir}/#{dir}" do
- recursive true
- action :delete
- end
- end
-
- directory new_resource.log_dir do
- mode '0750'
- recursive true
- end
-
- template '/usr/sbin/a2enmod' do
- source 'a2enmod.erb'
- cookbook 'apache2'
- mode '0700'
- owner 'root'
- variables(
- apachectl: apachectl,
- apache_dir: apache_dir,
- lib_dir: lib_dir
- )
- group new_resource.root_group
- action :create
- end
-
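- # The shipped a2enmod script dispatches on the name it is invoked as (like Debian's), so the remaining helper scripts can simply be symlinks to it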
- %w(a2ensite a2dissite a2dismod a2enconf a2disconf).each do |modscript|
- link "/usr/sbin/#{modscript}" do
- to '/usr/sbin/a2enmod'
- end
- end
-
- unless platform_family?('debian')
- cookbook_file '/usr/local/bin/apache2_module_conf_generate.pl' do
- source 'apache2_module_conf_generate.pl'
- cookbook 'apache2'
- mode '0750'
- owner 'root'
- group new_resource.root_group
- end
-
- with_run_context :root do
- execute 'generate-module-list' do
- command "/usr/local/bin/apache2_module_conf_generate.pl #{lib_dir} #{apache_dir}/mods-available"
- action :nothing
- end
- end
- end
-
- if platform_family?('freebsd')
- directory "#{apache_dir}/Includes" do
- action :delete
- recursive true
- end
-
- directory "#{apache_dir}/extra" do
- action :delete
- recursive true
- end
- end
-
- if platform_family?('suse')
- directory "#{apache_dir}/vhosts.d" do
- action :delete
- recursive true
- end
-
- %w(charset.conv default-vhost.conf default-server.conf default-vhost-ssl.conf errors.conf listen.conf mime.types mod_autoindex-defaults.conf mod_info.conf mod_log_config.conf mod_status.conf mod_userdir.conf mod_usertrack.conf uid.conf).each do |file|
- file "#{apache_dir}/#{file}" do
- action :delete
- backup false
- end
- end
- end
-
- directory "#{apache_dir}/ssl" do
- mode '0750'
- owner 'root'
- group new_resource.root_group
- end
-
- directory cache_dir do
- mode '0755'
- owner 'root'
- group new_resource.root_group
- end
-
- directory lock_dir do
- mode '0750'
- if platform_family?('debian')
- owner new_resource.apache_user
- else
- owner 'root'
- end
- group new_resource.root_group
- end
-
- template "/etc/sysconfig/#{apache_platform_service_name}" do
- source 'etc-sysconfig-httpd.erb'
- cookbook 'apache2'
- owner 'root'
- group new_resource.root_group
- mode '0644'
- variables(
- apache_binary: apache_binary,
- apache_dir: apache_dir,
- sysconfig_additional_params: new_resource.sysconfig_additional_params
- )
- only_if { platform_family?('rhel', 'amazon', 'fedora', 'suse') }
- end
-
- template "#{apache_dir}/envvars" do
- source 'envvars.erb'
- cookbook 'apache2'
- owner 'root'
- group new_resource.root_group
- mode '0644'
- variables(
- lock_dir: lock_dir,
- log_dir: new_resource.log_dir,
- apache_user: new_resource.apache_user,
- apache_group: new_resource.apache_group,
- pid_file: apache_pid_file,
- apache_locale: new_resource.apache_locale,
- status_url: new_resource.status_url,
- run_dir: new_resource.run_dir,
- envvars_additional_params: new_resource.envvars_additional_params
- )
- only_if { platform_family?('debian') }
- end
-
- apache2_config 'apache2.conf' do
- access_file_name new_resource.access_file_name
- log_dir new_resource.log_dir
- error_log new_resource.error_log
- log_level new_resource.log_level
- apache_user new_resource.apache_user
- apache_group new_resource.apache_group
- keep_alive new_resource.keep_alive
- max_keep_alive_requests new_resource.max_keep_alive_requests
- keep_alive_timeout new_resource.keep_alive_timeout
- docroot_dir new_resource.docroot_dir
- timeout new_resource.timeout
- server_name new_resource.server_name
- template_cookbook new_resource.template_cookbook
- end
-
- apache2_conf 'security' do
- options(
- server_signature: new_resource.server_signature,
- server_tokens: new_resource.server_tokens,
- trace_enable: new_resource.trace_enable
- )
- end
-
- apache2_conf 'charset' do
- options(
- default_charset: new_resource.default_charset
- )
- end
-
- template 'ports.conf' do
- path "#{apache_dir}/ports.conf"
- cookbook 'apache2'
- mode '0644'
- variables(listen: new_resource.listen)
- notifies :restart, 'service[apache2]', :delayed
- end
-
- # MPM Support Setup
- case new_resource.mpm
- when 'event'
- if platform_family?('suse')
- package %w(apache2-prefork apache2-worker) do
- action :remove
- end
-
- package 'apache2-event'
- else
- %w(mpm_prefork mpm_worker).each do |mpm|
- apache2_module mpm do
- action :disable
- end
- end
-
- apache2_module 'mpm_event' do
- mod_conf new_resource.mpm_conf || new_resource.mod_conf[:mpm_event]
- apache_service_notification :restart
- end
- end
-
- when 'prefork'
- if platform_family?('suse')
- package %w(apache2-event apache2-worker) do
- action :remove
- end
-
- package 'apache2-prefork'
- else
- %w(mpm_event mpm_worker).each do |mpm|
- apache2_module mpm do
- action :disable
- end
- end
-
- apache2_module 'mpm_prefork' do
- mod_conf new_resource.mpm_conf || new_resource.mod_conf[:mpm_prefork]
- apache_service_notification :restart
- end
- end
-
- when 'worker'
- if platform_family?('suse')
- package %w(apache2-event apache2-prefork) do
- action :remove
- end
-
- package 'apache2-worker'
- else
- %w(mpm_prefork mpm_event).each do |mpm|
- apache2_module mpm do
- action :disable
- end
- end
-
- apache2_module 'mpm_worker' do
- mod_conf new_resource.mpm_conf || new_resource.mod_conf[:mpm_worker]
- apache_service_notification :restart
- end
- end
- end
-
- new_resource.modules.each do |mod|
- apache2_module mod do
- mod_conf new_resource.mod_conf[mod.to_sym]
- end
- end
-end
-
-action_class do
- include Apache2::Cookbook::Helpers
-end
diff --git a/lc-gdn-chef/cookbooks/apache2/resources/mod.rb b/lc-gdn-chef/cookbooks/apache2/resources/mod.rb
deleted file mode 100644
index 0df268abe548dd321ef3d78f51bb6fb91cd0d26e..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/resources/mod.rb
+++ /dev/null
@@ -1,30 +0,0 @@
-unified_mode true
-
-property :template, String,
- name_property: true,
- description: 'Name of the template '
-
-property :root_group, String,
- default: lazy { node['root_group'] },
- description: 'Set to override the platforms default root group for the template file'
-
-property :template_cookbook, String,
- default: 'apache2',
- description: 'Cookbook containing the template file'
-
-action :create do
- template ::File.join(apache_dir, 'mods-available', "#{new_resource.template}.conf") do
- source "mods/#{new_resource.template}.conf.erb"
- cookbook new_resource.template_cookbook
- owner 'root'
- group new_resource.root_group
- mode '0644'
- variables(apache_dir: apache_dir)
- notifies :reload, 'service[apache2]', :delayed
- action :create
- end
-end
-
-action_class do
- include Apache2::Cookbook::Helpers
-end
diff --git a/lc-gdn-chef/cookbooks/apache2/resources/mod_actions.rb b/lc-gdn-chef/cookbooks/apache2/resources/mod_actions.rb
deleted file mode 100644
index caddf117204310f177f7b71411ec511cf0e3ed50..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/resources/mod_actions.rb
+++ /dev/null
@@ -1,17 +0,0 @@
-unified_mode true
-
-property :actions, Hash,
- default: {},
- description: 'A hash of actions, key: action-type value: cgi-script e.g. { news-handler: \'"/cgi-bin/news.cgi" virtual\' } '
-
-action :create do
- template ::File.join(apache_dir, 'mods-available', 'actions.conf') do
- source 'mods/actions.conf.erb'
- cookbook 'apache2'
- variables(actions: new_resource.actions)
- end
-end
-
-action_class do
- include Apache2::Cookbook::Helpers
-end
diff --git a/lc-gdn-chef/cookbooks/apache2/resources/mod_alias.rb b/lc-gdn-chef/cookbooks/apache2/resources/mod_alias.rb
deleted file mode 100644
index 12c217d7b092c6174a1b1db44caf1e4ffcf45213..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/resources/mod_alias.rb
+++ /dev/null
@@ -1,35 +0,0 @@
-unified_mode true
-
-property :options, Array,
- default: %w(Indexes MultiViews SymLinksIfOwnerMatch),
- description: 'Alias options'
-
-property :icondir, String,
- default: lazy { icon_dir },
- description: 'The icon directory
-Defaults to platform specific locations, see libraries/helpers.rb'
-
-property :allow_override, Array,
- default: %w(None),
- description: 'For full description see https://httpd.apache.org/docs/2.4/mod/core.html#allowoverride'
-
-property :require, String,
- default: 'all granted',
- description: 'For full description see https://httpd.apache.org/docs/2.4/mod/mod_authz_core.html#require'
-
-action :create do
- template ::File.join(apache_dir, 'mods-available', 'alias.conf') do
- source 'mods/alias.conf.erb'
- cookbook 'apache2'
- variables(
- icondir: new_resource.icondir,
- options: new_resource.options,
- allow_override: new_resource.allow_override,
- require: new_resource.require
- )
- end
-end
-
-action_class do
- include Apache2::Cookbook::Helpers
-end
diff --git a/lc-gdn-chef/cookbooks/apache2/resources/mod_auth_cas.rb b/lc-gdn-chef/cookbooks/apache2/resources/mod_auth_cas.rb
deleted file mode 100644
index e1c634d973c66820ecb3b052ecf367eecc29f2ac..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/resources/mod_auth_cas.rb
+++ /dev/null
@@ -1,133 +0,0 @@
-unified_mode true
-
-property :name, String, default: ''
-
-property :install_method, String,
- equal_to: %w( source package ),
- default: lazy { apache_mod_auth_cas_install_method },
- description: 'Install method for Mod auth CAS'
-
-property :source_revision, String,
- default: 'v1.2',
- description: 'Revision for the mod auth cas source install'
-
-property :source_checksum, String,
- default: 'b05a194f6c255f65a10537242648d8c0c2110960c03aff240bd8f52eaa454c29',
- description: 'Checksum for the mod auth cas source install'
-
-property :login_url, String,
- default: 'https://login.example.org/cas/login',
- description: 'The URL to redirect users to when they attempt to access a CAS protected resource and do not have an existing session.'
-
-property :validate_url, String,
- default: 'https://login.example.org/cas/serviceValidate',
- description: 'The URL to use when validating a ticket presented by a client'
-
-property :root_group, String,
- default: lazy { node['root_group'] },
- description: 'Group that the root user on the box runs as.
-Defaults to platform specific locations, see libraries/helpers.rb'
-
-property :apache_user, String,
- default: lazy { default_apache_user },
- description: 'Set to override the default apache2 user.
-Defaults to platform specific locations, see libraries/helpers.rb'
-
-property :apache_group, String,
- default: lazy { default_apache_group },
- description: 'Set to override the default apache2 group.
-Defaults to platform specific locations, see libraries/helpers.rb'
-
-property :mpm, String,
- default: lazy { default_mpm },
- description: 'Apache2 MPM: used to determine which devel package to install on Debian'
-
-property :directives, Hash,
- description: 'Hash of optional directives to pass to the mod_auth_cas module configuration'
-
-action :install do
- if new_resource.install_method.eql? 'source'
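- # Source install: fetch the tagged tarball from GitHub, build it with autotools, and ship our own auth_cas.load file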
- package [apache_devel_package(new_resource.mpm), apache_mod_auth_cas_devel_packages].flatten
-
- build_essential 'mod_auth_cas'
-
- mod_auth_cas_tarball = "#{Chef::Config[:file_cache_path]}/mod_auth_cas.tar.gz"
-
- remote_file mod_auth_cas_tarball do
- source "https://github.com/apereo/mod_auth_cas/archive/#{new_resource.source_revision}.tar.gz"
- checksum new_resource.source_checksum
- end
-
- archive_file mod_auth_cas_tarball do
- destination "#{Chef::Config[:file_cache_path]}/mod_auth_cas"
- notifies :run, 'execute[compile mod_auth_cas]', :immediately
- end
-
- execute 'compile mod_auth_cas' do
- command 'autoreconf -ivf && ./configure && make && make install'
- cwd "#{Chef::Config[:file_cache_path]}/mod_auth_cas/mod_auth_cas-#{new_resource.source_revision.gsub(/^v/, '')}"
- not_if { ::File.exist?("#{apache_libexec_dir}/mod_auth_cas.so") }
- end
-
- template "#{apache_dir}/mods-available/auth_cas.load" do
- cookbook 'apache2'
- source 'mods/auth_cas.load.erb'
- owner 'root'
- group new_resource.root_group
- variables(cache_dir: cache_dir, libexec_dir: apache_libexec_dir)
- mode '0644'
- end
- else
-
- case node['platform_family']
- when 'debian'
- package 'libapache2-mod-auth-cas'
- when 'rhel', 'fedora', 'amazon'
- include_recipe 'yum-epel' unless platform_family?('fedora')
-
- package 'mod_auth_cas' do
- notifies :run, 'execute[generate-module-list]', :immediately
- notifies :delete, 'directory[purge distro conf.modules.d]', :immediately
- notifies :delete, 'directory[purge distro conf.d]', :immediately
- end
-
- directory 'purge distro conf.modules.d' do
- path "#{apache_dir}/conf.modules.d"
- recursive true
- action :nothing
- end
-
- directory 'purge distro conf.d' do
- path "#{apache_dir}/conf.d"
- recursive true
- action :nothing
- end
-
- file "#{apache_dir}/conf.d/auth_cas.conf" do
- content '# conf is under mods-available/auth_cas.conf - apache2 cookbook\n'
- only_if { ::Dir.exist?("#{apache_dir}/conf.d") }
- end
- end
- end
-
- apache2_module 'auth_cas' do
- template_cookbook 'apache2'
- mod_conf(
- cache_dir: cache_dir,
- login_url: new_resource.login_url,
- validate_url: new_resource.validate_url,
- directives: new_resource.directives
- )
- notifies :reload, 'service[apache2]', :delayed
- end
-
- directory "#{cache_dir}/mod_auth_cas" do
- owner new_resource.apache_user
- group new_resource.apache_group
- mode '0700'
- end
-end
-
-action_class do
- include Apache2::Cookbook::Helpers
-end
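For context, a minimal sketch of how this resource could be driven from a wrapper recipe. The resource name assumes the file is `resources/mod_auth_cas.rb` in the `apache2` cookbook, the directive value is purely illustrative, and a `service[apache2]` resource is assumed to be declared elsewhere in the run so the reload notification can resolve.

```ruby
# Hypothetical wrapper recipe (not part of the cookbook shown above).
apache2_mod_auth_cas 'default' do
  validate_url 'https://login.example.org/cas/serviceValidate'
  # Extra directives are passed through mod_conf into mods-available/auth_cas.conf.
  directives('CASCookiePath' => '/var/cache/apache2/mod_auth_cas/')
end
```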
diff --git a/lc-gdn-chef/cookbooks/apache2/resources/mod_autoindex.rb b/lc-gdn-chef/cookbooks/apache2/resources/mod_autoindex.rb
deleted file mode 100644
index 6a2377b73785760c1104f187c62efdec2459b383..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/resources/mod_autoindex.rb
+++ /dev/null
@@ -1,34 +0,0 @@
-unified_mode true
-
-property :index_options, Array,
- default: %w(FancyIndexing VersionSort HTMLTable NameWidth=* DescriptionWidth=* Charset=UTF-8),
- description: 'An array of directory indexing options. For more information see https://httpd.apache.org/docs/2.4/mod/mod_autoindex.html#indexoptions'
-
-property :readme_name, String,
- default: 'README.html',
- description: 'Name of the file that will be inserted at the end of the index listing. For more information see https://httpd.apache.org/docs/2.4/mod/mod_autoindex.html#readmename'
-
-property :header_name, String,
- default: 'HEADER.html',
- description: 'Header name. For more information see https://httpd.apache.org/docs/2.4/mod/mod_autoindex.html#headername'
-
-property :index_ignore, String,
- default: '.??* *~ *# RCS CVS *,v *,t',
- description: 'Adds to the list of files to hide when listing a directory. For more information see https://httpd.apache.org/docs/2.4/mod/mod_autoindex.html#indexignore'
-
-action :create do
- template ::File.join(apache_dir, 'mods-available', 'autoindex.conf') do
- source 'mods/autoindex.conf.erb'
- cookbook 'apache2'
- variables(
- header_name: new_resource.header_name,
- index_options: new_resource.index_options,
- index_ignore: new_resource.index_ignore,
- readme_name: new_resource.readme_name
- )
- end
-end
-
-action_class do
- include Apache2::Cookbook::Helpers
-end
diff --git a/lc-gdn-chef/cookbooks/apache2/resources/mod_cache_disk.rb b/lc-gdn-chef/cookbooks/apache2/resources/mod_cache_disk.rb
deleted file mode 100644
index aace34842d1022002150e2a094132f781440e5cf..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/resources/mod_cache_disk.rb
+++ /dev/null
@@ -1,30 +0,0 @@
-unified_mode true
-
-property :cache_root, String,
- default: lazy { default_cache_root },
- description: 'Root directory to keep the cache.
-Defaults to platform specific locations, see libraries/helpers.rb'
-
-property :cache_dir_levels, String,
- default: '2',
- description: 'https://httpd.apache.org/docs/2.4/mod/mod_cache_disk.html#cachedirlevels'
-
-property :cache_dir_length, String,
- default: '2',
- description: 'https://httpd.apache.org/docs/2.4/mod/mod_cache_disk.html#cachedirlength'
-
-action :create do
- template ::File.join(apache_dir, 'mods-available', 'cache_disk.conf') do
- source 'mods/cache_disk.conf.erb'
- cookbook 'apache2'
- variables(
- cache_root: new_resource.cache_root,
- cache_dir_levels: new_resource.cache_dir_levels,
- cache_dir_length: new_resource.cache_dir_length
- )
- end
-end
-
-action_class do
- include Apache2::Cookbook::Helpers
-end
diff --git a/lc-gdn-chef/cookbooks/apache2/resources/mod_cgid.rb b/lc-gdn-chef/cookbooks/apache2/resources/mod_cgid.rb
deleted file mode 100644
index 6cc1819330ce04dbcd99bbf2e7f63eb64f735c9f..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/resources/mod_cgid.rb
+++ /dev/null
@@ -1,16 +0,0 @@
-unified_mode true
-
-property :script_sock, String,
- default: lazy { ::File.join(default_run_dir, 'cgisock') }
-
-action :create do
- template ::File.join(apache_dir, 'mods-available', 'cgid.conf') do
- source 'mods/cgid.conf.erb'
- cookbook 'apache2'
- variables(script_sock: new_resource.script_sock)
- end
-end
-
-action_class do
- include Apache2::Cookbook::Helpers
-end
diff --git a/lc-gdn-chef/cookbooks/apache2/resources/mod_dav_fs.rb b/lc-gdn-chef/cookbooks/apache2/resources/mod_dav_fs.rb
deleted file mode 100644
index 88d14755a75640f15e4fca1818167b1e00451f3f..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/resources/mod_dav_fs.rb
+++ /dev/null
@@ -1,18 +0,0 @@
-unified_mode true
-
-property :dav_lock_db, String,
- default: lazy { ::File.join(lock_dir, 'DAVLock') },
- description: 'LockDB file location.
-Defaults to platform specific locations, see libraries/helpers.rb'
-
-action :create do
- template ::File.join(apache_dir, 'mods-available', 'dav_fs.conf') do
- source 'mods/dav_fs.conf.erb'
- cookbook 'apache2'
- variables(dav_lock_db: new_resource.dav_lock_db)
- end
-end
-
-action_class do
- include Apache2::Cookbook::Helpers
-end
diff --git a/lc-gdn-chef/cookbooks/apache2/resources/mod_deflate.rb b/lc-gdn-chef/cookbooks/apache2/resources/mod_deflate.rb
deleted file mode 100644
index 64776d3c7c3a00018a96373cbac4e5c8ace358cf..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/resources/mod_deflate.rb
+++ /dev/null
@@ -1,29 +0,0 @@
-unified_mode true
-
-property :add_output_filter_by_type, Hash,
- default: {
- 1 => 'DEFLATE text/html text/plain text/xml',
- 2 => 'DEFLATE text/css',
- 3 => 'DEFLATE application/x-javascript application/javascript application/ecmascript',
- 4 => 'DEFLATE application/rss+xml',
- 5 => 'DEFLATE application/xml',
- 6 => 'DEFLATE application/xhtml+xml',
- 7 => 'DEFLATE image/svg+xml',
- 8 => 'DEFLATE application/atom_xml',
- 9 => 'DEFLATE application/x-httpd-php',
- 10 => 'DEFLATE application/x-httpd-fastphp',
- 11 => 'DEFLATE application/x-httpd-eruby',
- },
- description: 'A hash of output filters, ordered by key number'
-
-action :create do
- template ::File.join(apache_dir, 'mods-available', 'deflate.conf') do
- source 'mods/deflate.conf.erb'
- cookbook 'apache2'
- variables(add_output_filter_by_type: new_resource.add_output_filter_by_type)
- end
-end
-
-action_class do
- include Apache2::Cookbook::Helpers
-end
diff --git a/lc-gdn-chef/cookbooks/apache2/resources/mod_dir.rb b/lc-gdn-chef/cookbooks/apache2/resources/mod_dir.rb
deleted file mode 100644
index 4afa84c101d6f9171546d83b267961429b35e31e..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/resources/mod_dir.rb
+++ /dev/null
@@ -1,24 +0,0 @@
-unified_mode true
-
-property :directory_index, Array,
- default: %w(
- index.html
- index.cgi
- index.pl
- index.php
- index.xhtml
- index.htm
- ),
- description: 'Array of directory indexes'
-
-action :create do
- template ::File.join(apache_dir, 'mods-available', 'dir.conf') do
- source 'mods/dir.conf.erb'
- cookbook 'apache2'
- variables(directory_index: new_resource.directory_index)
- end
-end
-
-action_class do
- include Apache2::Cookbook::Helpers
-end
diff --git a/lc-gdn-chef/cookbooks/apache2/resources/mod_fastcgi.rb b/lc-gdn-chef/cookbooks/apache2/resources/mod_fastcgi.rb
deleted file mode 100644
index ea3b336d361c1cc00f78ba667b49b799a22ed102..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/resources/mod_fastcgi.rb
+++ /dev/null
@@ -1,30 +0,0 @@
-unified_mode true
-
-property :fast_cgi_wrapper, String,
- default: '',
- description: 'Defaults to an empty string'
-
-property :add_handler, Hash,
- default: { 1 => 'fastcgi-script .fcgi' },
- description: 'A key ordered hash of handlers'
-
-property :fast_cgi_ipc_dir, String,
- default: lazy { ::File.join(lib_dir, 'fastcgi') },
- description: 'FastCGI directory.
-Defaults to platform specific locations, see libraries/helpers.rb'
-
-action :create do
- template ::File.join(apache_dir, 'mods-available', 'fastcgi.conf') do
- source 'mods/fastcgi.conf.erb'
- cookbook 'apache2'
- variables(
- fast_cgi_wrapper: new_resource.fast_cgi_wrapper,
- add_handler: new_resource.add_handler,
- fast_cgi_ipc_dir: new_resource.fast_cgi_ipc_dir
- )
- end
-end
-
-action_class do
- include Apache2::Cookbook::Helpers
-end
diff --git a/lc-gdn-chef/cookbooks/apache2/resources/mod_fcgid.rb b/lc-gdn-chef/cookbooks/apache2/resources/mod_fcgid.rb
deleted file mode 100644
index be6452f340681f4ea2256aecd9ab5fa58355b8e3..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/resources/mod_fcgid.rb
+++ /dev/null
@@ -1,24 +0,0 @@
-unified_mode true
-
-property :add_handler, Hash,
- default: { 1 => 'fcgid-script .fcgi' },
- description: 'A key ordered hash of handlers'
-
-property :ipc_connect_timeout, Integer,
- default: 20,
- description: 'IPC Connection Timeout in seconds'
-
-action :create do
- template ::File.join(apache_dir, 'mods-available', 'fcgid.conf') do
- source 'mods/fcgid.conf.erb'
- cookbook 'apache2'
- variables(
- add_handler: new_resource.add_handler,
- ipc_connect_timeout: new_resource.ipc_connect_timeout
- )
- end
-end
-
-action_class do
- include Apache2::Cookbook::Helpers
-end
diff --git a/lc-gdn-chef/cookbooks/apache2/resources/mod_include.rb b/lc-gdn-chef/cookbooks/apache2/resources/mod_include.rb
deleted file mode 100644
index e9bc749a1e7ae0eda3aeaa97f2b95054f7456387..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/resources/mod_include.rb
+++ /dev/null
@@ -1,24 +0,0 @@
-unified_mode true
-
-property :add_type, Hash,
- default: { 1 => 'text/html .shtml' },
- description: ''
-
-property :add_output_filter, Hash,
- default: { 1 => 'INCLUDES .shtml' },
- description: ''
-
-action :create do
- template ::File.join(apache_dir, 'mods-available', 'include.conf') do
- source 'mods/include.conf.erb'
- cookbook 'apache2'
- variables(
- add_type: new_resource.add_type,
- add_output_filter: new_resource.add_output_filter
- )
- end
-end
-
-action_class do
- include Apache2::Cookbook::Helpers
-end
diff --git a/lc-gdn-chef/cookbooks/apache2/resources/mod_info.rb b/lc-gdn-chef/cookbooks/apache2/resources/mod_info.rb
deleted file mode 100644
index 87559396c189a217ea2305485dd2a4f045d7fd0f..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/resources/mod_info.rb
+++ /dev/null
@@ -1,17 +0,0 @@
-unified_mode true
-
-property :info_allow_list, [String, Array],
- default: %w(127.0.0.1 ::1),
- description: ''
-
-action :create do
- template ::File.join(apache_dir, 'mods-available', 'info.conf') do
- source 'mods/info.conf.erb'
- cookbook 'apache2'
- variables(info_allow_list: Array(new_resource.info_allow_list))
- end
-end
-
-action_class do
- include Apache2::Cookbook::Helpers
-end
diff --git a/lc-gdn-chef/cookbooks/apache2/resources/mod_ldap.rb b/lc-gdn-chef/cookbooks/apache2/resources/mod_ldap.rb
deleted file mode 100644
index a48ca90170796db0819890340d34e862ff0d13b6..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/resources/mod_ldap.rb
+++ /dev/null
@@ -1,29 +0,0 @@
-unified_mode true
-
-property :location, String,
- default: '/ldap-status',
- description: ''
-
-property :set_handler, String,
- default: 'ldap-status',
- description: ''
-
-property :require, String,
- default: 'local',
- description: ''
-
-action :create do
- template ::File.join(apache_dir, 'mods-available', 'ldap.conf') do
- source 'mods/ldap.conf.erb'
- cookbook 'apache2'
- variables(
- location: new_resource.location,
- set_handler: new_resource.set_handler,
- require: new_resource.require
- )
- end
-end
-
-action_class do
- include Apache2::Cookbook::Helpers
-end
diff --git a/lc-gdn-chef/cookbooks/apache2/resources/mod_mime.rb b/lc-gdn-chef/cookbooks/apache2/resources/mod_mime.rb
deleted file mode 100644
index 9faf1b0503a9e2dad9d70918158dddabbc77d26a..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/resources/mod_mime.rb
+++ /dev/null
@@ -1,49 +0,0 @@
-unified_mode true
-
-property :types_config, String,
- default: lazy { default_types_config },
- description: ''
-
-property :add_type, Hash,
- default: {
- 1 => 'text/html .shtml',
- 2 => 'application/x-compress .Z',
- 3 => 'application/x-gzip .gz .tgz',
- 4 => 'application/x-bzip2 .bz2',
- 5 => 'image/svg+xml svg svgz',
- },
- description: ''
-
-property :add_handler, Hash,
- default: { 1 => 'AddHandler type-map var' },
- description: ''
-
-property :add_output_filter, Hash,
- default: { 1 => 'INCLUDES .shtml' },
- description: ''
-
-property :add_encoding, Hash,
- default: { 1 => 'gzip svgz' },
- description: ''
-
-property :add_language, Hash,
- default: {},
- description: 'Not currently used'
-
-action :create do
- template ::File.join(apache_dir, 'mods-available', 'mime.conf') do
- source 'mods/mime.conf.erb'
- cookbook 'apache2'
- variables(
- types_config: new_resource.types_config,
- add_type: new_resource.add_type,
- add_handler: new_resource.add_handler,
- add_output_filter: new_resource.add_output_filter,
- add_encoding: new_resource.add_encoding
- )
- end
-end
-
-action_class do
- include Apache2::Cookbook::Helpers
-end
diff --git a/lc-gdn-chef/cookbooks/apache2/resources/mod_mime_magic.rb b/lc-gdn-chef/cookbooks/apache2/resources/mod_mime_magic.rb
deleted file mode 100644
index 39a5f439aad7262f4d365395c5cd450aa3e73e5e..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/resources/mod_mime_magic.rb
+++ /dev/null
@@ -1,18 +0,0 @@
-unified_mode true
-
-property :mime_magic_file, String,
- default: lazy { default_mime_magic_file },
- description: 'The location of the MIME magic file.
-Defaults to platform specific locations, see libraries/helpers.rb'
-
-action :create do
- template ::File.join(apache_dir, 'mods-available', 'mime_magic.conf') do
- source 'mods/mime_magic.conf.erb'
- cookbook 'apache2'
- variables(mime_magic_file: new_resource.mime_magic_file)
- end
-end
-
-action_class do
- include Apache2::Cookbook::Helpers
-end
diff --git a/lc-gdn-chef/cookbooks/apache2/resources/mod_mpm_event.rb b/lc-gdn-chef/cookbooks/apache2/resources/mod_mpm_event.rb
deleted file mode 100644
index d6ed4ab298f1dd71576c9a1c2f8f95cf952cfd18..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/resources/mod_mpm_event.rb
+++ /dev/null
@@ -1,54 +0,0 @@
-unified_mode true
-
-property :startservers, Integer,
- default: 4,
- description: ''
-
-property :serverlimit, Integer,
- default: 16,
- description: ''
-
-property :minsparethreads, Integer,
- default: 64,
- description: ''
-
-property :maxsparethreads, Integer,
- default: 192,
- description: ''
-
-property :threadlimit, Integer,
- default: 192,
- description: ''
-
-property :threadsperchild, Integer,
- default: 64,
- description: ''
-
-property :maxrequestworkers, Integer,
- default: 1024,
- description: ''
-
-property :maxconnectionsperchild, Integer,
- default: 0,
- description: ''
-
-action :create do
- template ::File.join(apache_dir, 'mods-available', 'mpm_event.conf') do
- source 'mods/mpm_event.conf.erb'
- cookbook 'apache2'
- variables(
- startservers: new_resource.startservers,
- serverlimit: new_resource.serverlimit,
- minsparethreads: new_resource.minsparethreads,
- maxsparethreads: new_resource.maxsparethreads,
- threadlimit: new_resource.threadlimit,
- threadsperchild: new_resource.threadsperchild,
- maxrequestworkers: new_resource.maxrequestworkers,
- maxconnectionsperchild: new_resource.maxconnectionsperchild
- )
- end
-end
-
-action_class do
- include Apache2::Cookbook::Helpers
-end
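A sketch of overriding the event MPM tuning knobs from a wrapper recipe; the numbers are arbitrary examples (kept consistent so MaxRequestWorkers ≤ ServerLimit × ThreadsPerChild), not recommendations.

```ruby
# Hypothetical wrapper recipe; values are illustrative only.
apache2_mod_mpm_event 'default' do
  startservers 2
  serverlimit 8
  threadsperchild 32
  maxrequestworkers 256 # 8 * 32
end
```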
diff --git a/lc-gdn-chef/cookbooks/apache2/resources/mod_mpm_prefork.rb b/lc-gdn-chef/cookbooks/apache2/resources/mod_mpm_prefork.rb
deleted file mode 100644
index 61f24f112374b9d5c49f20a1ad41b5f4caf1c396..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/resources/mod_mpm_prefork.rb
+++ /dev/null
@@ -1,44 +0,0 @@
-unified_mode true
-
-property :startservers, Integer,
- default: 16,
- description: 'number of server processes to start'
-
-property :minspareservers, Integer,
- default: 16,
- description: 'minimum number of server processes which are kept spare'
-
-property :maxspareservers, Integer,
- default: 32,
- description: 'maximum number of server processes which are kept spare'
-
-property :serverlimit, Integer,
- default: 256,
- description: ''
-
-property :maxrequestworkers, Integer,
- default: 256,
- description: 'maximum number of server processes allowed to start'
-
-property :maxconnectionsperchild, Integer,
- default: 10_000,
- description: ''
-
-action :create do
- template ::File.join(apache_dir, 'mods-available', 'mpm_prefork.conf') do
- source 'mods/mpm_prefork.conf.erb'
- cookbook 'apache2'
- variables(
- startservers: new_resource.startservers,
- minspareservers: new_resource.minspareservers,
- maxspareservers: new_resource.maxspareservers,
- serverlimit: new_resource.serverlimit,
- maxrequestworkers: new_resource.maxrequestworkers,
- maxconnectionsperchild: new_resource.maxconnectionsperchild
- )
- end
-end
-
-action_class do
- include Apache2::Cookbook::Helpers
-end
diff --git a/lc-gdn-chef/cookbooks/apache2/resources/mod_mpm_worker.rb b/lc-gdn-chef/cookbooks/apache2/resources/mod_mpm_worker.rb
deleted file mode 100644
index 2b05edc86d1eddb0b94835c7fcdc513b159d4b70..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/resources/mod_mpm_worker.rb
+++ /dev/null
@@ -1,54 +0,0 @@
-unified_mode true
-
-property :startservers, Integer,
- default: 4,
- description: 'initial number of server processes to start'
-
-property :minsparethreads, Integer,
- default: 64,
- description: 'minimum number of worker threads which are kept spare'
-
-property :maxsparethreads, Integer,
- default: 192,
- description: 'maximum number of worker threads which are kept spare'
-
-property :threadsperchild, Integer,
- default: 64,
- description: 'constant number of worker threads in each server process'
-
-property :maxrequestworkers, Integer,
- default: 1024,
- description: 'maximum number of threads'
-
-property :maxconnectionsperchild, Integer,
- default: 0,
- description: 'maximum number of requests a server process serves'
-
-property :threadlimit, Integer,
- default: 192,
- description: 'ThreadsPerChild can be changed to this maximum value during a graceful restart. ThreadLimit can only be changed by stopping and starting Apache.'
-
-property :serverlimit, Integer,
- default: 16,
- description: ''
-
-action :create do
- template ::File.join(apache_dir, 'mods-available', 'mpm_worker.conf') do
- source 'mods/mpm_worker.conf.erb'
- cookbook 'apache2'
- variables(
- startservers: new_resource.startservers,
- minsparethreads: new_resource.minsparethreads,
- maxsparethreads: new_resource.maxsparethreads,
- threadsperchild: new_resource.threadsperchild,
- maxrequestworkers: new_resource.maxrequestworkers,
- maxconnectionsperchild: new_resource.maxconnectionsperchild,
- threadlimit: new_resource.threadlimit,
- serverlimit: new_resource.serverlimit
- )
- end
-end
-
-action_class do
- include Apache2::Cookbook::Helpers
-end
diff --git a/lc-gdn-chef/cookbooks/apache2/resources/mod_negotiation.rb b/lc-gdn-chef/cookbooks/apache2/resources/mod_negotiation.rb
deleted file mode 100644
index a5a974f0293e473f2d6e3de078a0c5d8399f00dd..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/resources/mod_negotiation.rb
+++ /dev/null
@@ -1,24 +0,0 @@
-unified_mode true
-
-property :language_priority, Array,
- default: %w( en ca cs da de el eo es et fr he hr it ja ko ltz nl nn no pl pt pt-BR ru sv tr zh-CN zh-TW),
- description: ''
-
-property :force_language_priority, String,
- default: 'Prefer Fallback',
- description: ''
-
-action :create do
- template ::File.join(apache_dir, 'mods-available', 'negotiation.conf') do
- source 'mods/negotiation.conf.erb'
- cookbook 'apache2'
- variables(
- language_priority: new_resource.language_priority.join(' '),
- force_language_priority: new_resource.force_language_priority
- )
- end
-end
-
-action_class do
- include Apache2::Cookbook::Helpers
-end
diff --git a/lc-gdn-chef/cookbooks/apache2/resources/mod_pagespeed.rb b/lc-gdn-chef/cookbooks/apache2/resources/mod_pagespeed.rb
deleted file mode 100644
index 068a1a7b9d48aa2e3332d79fe2c700fd6b1e8023..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/resources/mod_pagespeed.rb
+++ /dev/null
@@ -1,109 +0,0 @@
-unified_mode true
-
-property :apache_user, String,
- default: lazy { default_apache_user },
- description: ''
-
-property :apache_group, String,
- default: lazy { default_apache_group },
- description: ''
-
-property :mod_page_speed, String,
- equal_to: %w(on off),
- default: 'on',
- description: ''
-
-property :file_cache_path, String,
- default: '/var/cache/mod_pagespeed/',
- description: ''
-
-property :output_filters, Array,
- default: ['MOD_PAGESPEED_OUTPUT_FILTER text/html'],
- description: ''
-
-property :inherit_vhost_config, String,
- default: 'on',
- equal_to: %w(on off),
- description: ''
-
-property :rewrite_level, String,
- default: '',
- equal_to: ['', 'PassThrough', 'CoreFilters', 'TestingCoreFilters'],
- description: 'For full info see https://www.modpagespeed.com/doc/config_filters'
-
-property :disable_filters, String,
- default: '',
- description: 'For full info see https://www.modpagespeed.com/doc/filters'
-
-property :enable_filters, String,
- default: '',
- description: 'For full info see https://www.modpagespeed.com/doc/filters'
-
-property :domain, String,
- default: '',
- description: 'For full info see https://www.modpagespeed.com/doc/domains'
-
-property :extra_config, Hash,
- default: {},
- description: 'A hash of extra key/value pairs to set, e.g. "ModPagespeedFileCacheSizeKb" => "102400"'
-
-property :file_cache_inode_limit, [String, Integer],
- default: '500000',
- coerce: proc { |m| m.is_a?(Integer) ? m.to_s : m },
- description: ''
-
-property :avoid_renaming_introspective_javascript, String,
- equal_to: %w(on off),
- default: 'on',
- description: ''
-
-property :library, Array,
- default: [
- '105527 ltVVzzYxo0 //ajax.googleapis.com/ajax/libs/prototype/1.6.1.0/prototype.js',
- '92501 J8KF47pYOq //ajax.googleapis.com/ajax/libs/jquery/1.8.0/jquery.min.js',
- '141547 GKjMUuF4PK //ajax.googleapis.com/ajax/libs/jquery/1.8.0/jquery.min.js',
- '43 1o978_K0_L http://www.modpagespeed.com/rewrite_javascript.js',
- ],
- description: 'Array of libraries to load in the form "bytes MD5 canonical_url"; each entry is prefixed with ModPagespeedLibrary'
-
-action :create do
- remote_file "#{Chef::Config[:file_cache_path]}/mod-pagespeed.deb" do
- source pagespeed_url
- mode '0644'
- action :create_if_missing
- end
-
- package 'mod_pagespeed' do
- source "#{Chef::Config[:file_cache_path]}/mod-pagespeed.deb"
- action :install
- end
-
- directory new_resource.file_cache_path do
- user new_resource.apache_user
- group new_resource.apache_group
- mode '0750'
- end
-
- template ::File.join(apache_dir, 'mods-available', 'pagespeed.conf') do
- source 'mods/pagespeed.conf.erb'
- cookbook 'apache2'
- variables(
- mod_page_speed: new_resource.mod_page_speed,
- file_cache_path: new_resource.file_cache_path,
- output_filters: new_resource.output_filters,
- inherit_vhost_config: new_resource.inherit_vhost_config,
- rewrite_level: new_resource.rewrite_level,
- disable_filters: new_resource.disable_filters,
- enable_filters: new_resource.enable_filters,
- domain: new_resource.domain,
- extra_config: new_resource.extra_config,
- file_cache_inode_limit: new_resource.file_cache_inode_limit,
- avoid_renaming_introspective_javascript: new_resource.avoid_renaming_introspective_javascript,
- library: new_resource.library
- )
- end
-end
-
-action_class do
- include Apache2::Cookbook::Helpers
-end
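A sketch of typical usage from a wrapper recipe; the filter names are examples from the mod_pagespeed documentation, and the .deb download above implies a Debian-family platform.

```ruby
# Hypothetical wrapper recipe; filter and cache values are illustrative only.
apache2_mod_pagespeed 'default' do
  rewrite_level 'CoreFilters'
  enable_filters 'collapse_whitespace,remove_comments'
  extra_config('ModPagespeedFileCacheSizeKb' => '102400')
end
```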
diff --git a/lc-gdn-chef/cookbooks/apache2/resources/mod_php.rb b/lc-gdn-chef/cookbooks/apache2/resources/mod_php.rb
deleted file mode 100644
index 41052e442562fbfab402937779d2016e83a2c2fe..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/resources/mod_php.rb
+++ /dev/null
@@ -1,48 +0,0 @@
-unified_mode true
-
-property :name, String, default: ''
-
-property :module_name, String,
- default: lazy { apache_mod_php_modulename },
- description: 'Module name for the Apache PHP module.'
-
-property :so_filename, String,
- default: lazy { apache_mod_php_filename },
- description: 'Filename for the module executable.'
-
-property :package_name, [String, Array],
- default: lazy { apache_mod_php_package },
- description: 'Package that contains the Apache PHP module itself'
-
-property :install_package, [true, false],
- default: true,
- description: 'Whether to install the Apache PHP module package'
-
-action :create do
- # install mod_php package (if requested)
- package new_resource.package_name do
- only_if { new_resource.install_package }
- notifies :delete, 'directory[purge distro conf.modules.d]', :immediately
- notifies :delete, 'directory[purge distro conf.d]', :immediately
- end
-
- directory 'purge distro conf.modules.d' do
- path "#{apache_dir}/conf.modules.d"
- recursive true
- action :nothing
- end
-
- directory 'purge distro conf.d' do
- path "#{apache_dir}/conf.d"
- recursive true
- action :nothing
- end
-
- apache2_module 'php' do
- identifier new_resource.module_name
- mod_name new_resource.so_filename
- conf true
- template_cookbook 'apache2'
- notifies :restart, 'service[apache2]'
- end
-end
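A sketch of two common ways to call this resource from a wrapper recipe: letting the platform-specific lazy defaults pick the package, or skipping the package install when PHP is managed by another cookbook. A `service[apache2]` resource is assumed to exist for the restart notification.

```ruby
# Hypothetical wrapper recipe.
apache2_mod_php 'default'

# Or, when the PHP package is installed elsewhere:
apache2_mod_php 'config only' do
  install_package false
end
```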
diff --git a/lc-gdn-chef/cookbooks/apache2/resources/mod_proxy.rb b/lc-gdn-chef/cookbooks/apache2/resources/mod_proxy.rb
deleted file mode 100644
index dcb109391096c037adefe772a79a3f4565684012..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/resources/mod_proxy.rb
+++ /dev/null
@@ -1,35 +0,0 @@
-unified_mode true
-
-property :proxy_requests, String,
- default: 'Off',
- description: ''
-
-property :require, String,
- default: 'all denied',
- description: '[See mod_proxy access](https://httpd.apache.org/docs/trunk/mod/mod_proxy.html#access)'
-
-property :add_default_charset, String,
- default: 'off',
- description: 'Add the default character set'
-
-property :proxy_via, String,
- equal_to: %w( Off On Full Block ),
- default: 'On',
- description: 'Enable/disable the handling of HTTP/1.1 "Via:" headers.'
-
-action :create do
- template ::File.join(apache_dir, 'mods-available', 'proxy.conf') do
- source 'mods/proxy.conf.erb'
- cookbook 'apache2'
- variables(
- proxy_requests: new_resource.proxy_requests,
- require: new_resource.require,
- add_default_charset: new_resource.add_default_charset,
- proxy_via: new_resource.proxy_via
- )
- end
-end
-
-action_class do
- include Apache2::Cookbook::Helpers
-end
diff --git a/lc-gdn-chef/cookbooks/apache2/resources/mod_proxy_balancer.rb b/lc-gdn-chef/cookbooks/apache2/resources/mod_proxy_balancer.rb
deleted file mode 100644
index 11fc3d436e7bcc8de32b78abeb7006ddcd2c0e6f..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/resources/mod_proxy_balancer.rb
+++ /dev/null
@@ -1,29 +0,0 @@
-unified_mode true
-
-property :status_location, String,
- default: '/balancer-manager',
- description: ''
-
-property :set_handler, String,
- default: 'balancer-manager',
- description: ''
-
-property :require, String,
- default: 'local',
- description: 'For full description see https://httpd.apache.org/docs/2.4/mod/mod_authz_core.html#require'
-
-action :create do
- template ::File.join(apache_dir, 'mods-available', 'proxy_balancer.conf') do
- source 'mods/proxy_balancer.conf.erb'
- cookbook 'apache2'
- variables(
- status_location: new_resource.status_location,
- set_handler: new_resource.set_handler,
- require: new_resource.require
- )
- end
-end
-
-action_class do
- include Apache2::Cookbook::Helpers
-end
diff --git a/lc-gdn-chef/cookbooks/apache2/resources/mod_proxy_ftp.rb b/lc-gdn-chef/cookbooks/apache2/resources/mod_proxy_ftp.rb
deleted file mode 100644
index d1c27aefb8e7a79757b3951102732ca6e1c25bf3..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/resources/mod_proxy_ftp.rb
+++ /dev/null
@@ -1,29 +0,0 @@
-unified_mode true
-
-property :proxy_ftp_dir_charset, String,
- default: 'UTF-8',
- description: ''
-
-property :proxy_ftp_escape_wildcards, String,
- equal_to: ['on', 'off', ''],
- default: ''
-
-property :proxy_ftp_list_on_wildcard, String,
- equal_to: ['on', 'off', ''],
- default: ''
-
-action :create do
- template ::File.join(apache_dir, 'mods-available', 'proxy_ftp.conf') do
- source 'mods/proxy_ftp.conf.erb'
- cookbook 'apache2'
- variables(
- proxy_ftp_dir_charset: new_resource.proxy_ftp_dir_charset,
- proxy_ftp_escape_wildcards: new_resource.proxy_ftp_escape_wildcards,
- proxy_ftp_list_on_wildcard: new_resource.proxy_ftp_list_on_wildcard
- )
- end
-end
-
-action_class do
- include Apache2::Cookbook::Helpers
-end
diff --git a/lc-gdn-chef/cookbooks/apache2/resources/mod_reqtimeout.rb b/lc-gdn-chef/cookbooks/apache2/resources/mod_reqtimeout.rb
deleted file mode 100644
index 5d73c89c3edd3ae1d91465d8df1a6de7a65e99de..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/resources/mod_reqtimeout.rb
+++ /dev/null
@@ -1,21 +0,0 @@
-unified_mode true
-
-property :request_read_timeout, Hash,
- default: {
- '1': 'header=20-40,minrate=500',
- '2': 'body=10,minrate=500',
- },
- description: 'A hash of ordered rules.
-For full information see https://httpd.apache.org/docs/2.4/mod/mod_reqtimeout.html'
-
-action :create do
- template ::File.join(apache_dir, 'mods-available', 'reqtimeout.conf') do
- source 'mods/reqtimeout.conf.erb'
- cookbook 'apache2'
- variables(request_read_timeout: new_resource.request_read_timeout)
- end
-end
-
-action_class do
- include Apache2::Cookbook::Helpers
-end
diff --git a/lc-gdn-chef/cookbooks/apache2/resources/mod_setenvif.rb b/lc-gdn-chef/cookbooks/apache2/resources/mod_setenvif.rb
deleted file mode 100644
index 94e4761d80913fe7dc32e570ef1efc9a65a901b4..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/resources/mod_setenvif.rb
+++ /dev/null
@@ -1,44 +0,0 @@
-unified_mode true
-
-property :browser_match, Array,
- default: [
- '"Mozilla/2" nokeepalive',
- '"MSIE 4\.0b2;" nokeepalive downgrade-1.0 force-response-1.0',
- '"RealPlayer 4\.0" force-response-1.0',
- '"Java/1\.0" force-response-1.0',
- '"JDK/1\.0" force-response-1.0',
- '"Microsoft Data Access Internet Publishing Provider" redirect-carefully',
- '"MS FrontPage" redirect-carefully',
- '"^WebDrive" redirect-carefully',
- '"^WebDAVFS/1.[012]" redirect-carefully',
- '"^gnome-vfs/1.0" redirect-carefully',
- '"^gvfs/1" redirect-carefully',
- '"^XML Spy" redirect-carefully',
- '"^Dreamweaver-WebDAV-SCM1" redirect-carefully',
- '"Konqueror/4" redirect-carefully',
- ],
- description: ''
-
-property :browser_match_nocase, Array,
- default: [],
- description: ''
-
-property :set_env_if_no_case, Array,
- default: [],
- description: ''
-
-action :create do
- template ::File.join(apache_dir, 'mods-available', 'setenvif.conf') do
- source 'mods/setenvif.conf.erb'
- cookbook 'apache2'
- variables(
- browser_match: new_resource.browser_match,
- browser_matches_no_case: new_resource.browser_match_nocase,
- set_env_if_no_cases: new_resource.set_env_if_no_case
- )
- end
-end
-
-action_class do
- include Apache2::Cookbook::Helpers
-end
diff --git a/lc-gdn-chef/cookbooks/apache2/resources/mod_ssl.rb b/lc-gdn-chef/cookbooks/apache2/resources/mod_ssl.rb
deleted file mode 100644
index 901a1156f2df85dadcae7d844aa096d8f7802e2c..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/resources/mod_ssl.rb
+++ /dev/null
@@ -1,110 +0,0 @@
-unified_mode true
-
-property :mod_ssl_pkg, String,
- default: 'mod_ssl',
- description: 'The name of the mod_ssl package'
-
-property :pass_phrase_dialog, String,
- default: lazy { default_pass_phrase_dialog },
- description: ''
-
-property :session_cache, String,
- default: lazy { default_session_cache },
- description: ''
-
-property :session_cache_timeout, String,
- default: '300',
- description: ''
-
-property :cipher_suite, String,
- default: 'EDH+CAMELLIA:EDH+aRSA:EECDH+aRSA+AESGCM:EECDH+aRSA+SHA256:EECDH:+CAMELLIA128:+AES128:+SSLv3:!aNULL:!eNULL:!LOW:!3DES:!MD5:!EXP:!PSK:!DSS:!RC4:!SEED:!IDEA:!ECDSA:kEDH:CAMELLIA128-SHA:AES128-SHA',
- description: ''
-
-property :honor_cipher_order, String,
- default: 'On',
- description: ''
-
-property :protocol, String,
- default: 'TLSv1.2',
- description: ''
-
-property :insecure_renegotiation, String,
- default: 'Off',
- description: ''
-
-property :strict_sni_vhost_check, String,
- default: 'Off',
- description: ''
-
-property :compression, String,
- default: 'Off',
- description: ''
-
-property :use_stapling, String,
- default: 'Off',
- description: ''
-
-property :stapling_responder_timeout, String,
- default: '5',
- description: ''
-
-property :stapling_return_responder_errors, String,
- default: 'Off',
- description: ''
-
-property :stapling_cache, String,
- default: 'shmcb:/var/run/ocsp(128000)',
- description: ''
-
-property :directives, Hash,
- description: ''
-
-action :create do
- if platform_family?('rhel', 'fedora', 'suse', 'amazon')
- with_run_context :root do
- package new_resource.mod_ssl_pkg do
- notifies :run, 'execute[generate-module-list]', :immediately
- only_if { platform_family?('rhel', 'fedora', 'amazon') }
- end
- end
- end
-
- file "#{apache_dir}/conf.d/ssl.conf" do
- content "# SSL Conf is under mods-available/ssl.conf - apache2 cookbook\n"
- only_if { ::File.exist?("#{apache_dir}/conf.d") }
- end
-
- template ::File.join(apache_dir, 'mods-available', 'ssl.conf') do
- source 'mods/ssl.conf.erb'
- cookbook 'apache2'
- variables(
- pass_phrase_dialog: new_resource.pass_phrase_dialog,
- session_cache: new_resource.session_cache,
- session_cache_timeout: new_resource.session_cache_timeout,
- cipher_suite: new_resource.cipher_suite,
- honor_cipher_order: new_resource.honor_cipher_order,
- protocol: new_resource.protocol,
- insecure_renegotiation: new_resource.insecure_renegotiation,
- strict_sni_vhost_check: new_resource.strict_sni_vhost_check,
- compression: new_resource.compression,
- use_stapling: new_resource.use_stapling,
- stapling_responder_timeout: new_resource.stapling_responder_timeout,
- stapling_return_responder_errors: new_resource.stapling_return_responder_errors,
- stapling_cache: new_resource.stapling_cache,
- directives: new_resource.directives
- )
- end
-
- apache2_module 'socache_shmcb'
-
- %w( conf.d conf.modules.d ).each do |dir|
- directory "#{apache_dir}/#{dir}" do
- recursive true
- action :delete
- end
- end
-end
-
-action_class do
- include Apache2::Cookbook::Helpers
-end
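A sketch of overriding a few of the string-typed TLS settings from a wrapper recipe; the values are examples, not a vetted TLS policy, and `service[apache2]` is assumed to be managed elsewhere.

```ruby
# Hypothetical wrapper recipe; values are illustrative only.
apache2_mod_ssl 'default' do
  compression 'Off'
  use_stapling 'On'
  stapling_responder_timeout '10'
end
```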
diff --git a/lc-gdn-chef/cookbooks/apache2/resources/mod_status.rb b/lc-gdn-chef/cookbooks/apache2/resources/mod_status.rb
deleted file mode 100644
index e7d74d1956dfc699abf3c9b3a75af8e875ee234d..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/resources/mod_status.rb
+++ /dev/null
@@ -1,37 +0,0 @@
-unified_mode true
-
-property :location, String,
- default: '/server-status',
- description: ''
-
-property :status_allow_list, [String, Array],
- default: %w(127.0.0.1 ::1),
- description: 'Clients in the specified IP address ranges can access the resource.
-For full description see https://httpd.apache.org/docs/2.4/mod/mod_authz_core.html#require'
-
-property :extended_status, String,
- equal_to: %w(On Off),
- default: 'Off',
- description: 'For info see: https://httpd.apache.org/docs/current/mod/mod_status.html'
-
-property :proxy_status, String,
- equal_to: %w(On Off),
- default: 'On',
- description: 'For info see: https://httpd.apache.org/docs/current/mod/mod_status.html'
-
-action :create do
- template ::File.join(apache_dir, 'mods-available', 'status.conf') do
- source 'mods/status.conf.erb'
- cookbook 'apache2'
- variables(
- location: new_resource.location,
- status_allow_list: Array(new_resource.status_allow_list),
- extended_status: new_resource.extended_status,
- proxy_status: new_resource.proxy_status
- )
- end
-end
-
-action_class do
- include Apache2::Cookbook::Helpers
-end
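A sketch of exposing an extended status page to an extra network range; the CIDR is an arbitrary documentation-range example.

```ruby
# Hypothetical wrapper recipe.
apache2_mod_status 'default' do
  extended_status 'On'
  status_allow_list %w(127.0.0.1 ::1 192.0.2.0/24)
end
```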
diff --git a/lc-gdn-chef/cookbooks/apache2/resources/mod_userdir.rb b/lc-gdn-chef/cookbooks/apache2/resources/mod_userdir.rb
deleted file mode 100644
index afd7badcba6abe654e12985cf431c1bc65105deb..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/resources/mod_userdir.rb
+++ /dev/null
@@ -1,25 +0,0 @@
-unified_mode true
-
-property :public_html_dir, String,
- default: '/home/*/public_html',
- description: ''
-
-property :options, String,
- default: 'MultiViews Indexes SymLinksIfOwnerMatch IncludesNoExec',
- description: ''
-
-property :allow_override, String,
- default: 'FileInfo AuthConfig Limit Indexes',
- description: 'For full description see https://httpd.apache.org/docs/2.4/mod/core.html#allowoverride'
-
-action :create do
- template ::File.join(apache_dir, 'mods-available', 'userdir.conf') do
- source 'mods/userdir.conf.erb'
- cookbook 'apache2'
- variables(
- public_html_dir: new_resource.public_html_dir,
- allow_override: new_resource.allow_override,
- options: new_resource.options
- )
- end
-end
diff --git a/lc-gdn-chef/cookbooks/apache2/resources/mod_wsgi.rb b/lc-gdn-chef/cookbooks/apache2/resources/mod_wsgi.rb
deleted file mode 100644
index adfea913a215ec54d332018591076dbdb5f9e12d..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/resources/mod_wsgi.rb
+++ /dev/null
@@ -1,39 +0,0 @@
-unified_mode true
-
-property :name, String, default: ''
-
-property :module_name, String,
- default: 'wsgi_module',
- description: 'Module name for the Apache WSGI module.'
-
-property :so_filename, String,
- default: lazy { apache_mod_wsgi_filename },
- description: 'Filename for the module executable.'
-
-property :package_name, [String, Array],
- default: lazy { apache_mod_wsgi_package },
- description: 'Package that contains the Apache WSGI module itself'
-
-property :install_package, [true, false],
- default: true,
- description: 'Whether to install the Apache WSGI module package'
-
-action :create do
- # install mod_wsgi package (if requested)
- package new_resource.package_name do
- only_if { new_resource.install_package }
- notifies :delete, 'directory[purge distro conf.modules.d]', :immediately
- end
-
- directory 'purge distro conf.modules.d' do
- path "#{apache_dir}/conf.modules.d"
- recursive true
- action :nothing
- end
-
- apache2_module 'wsgi' do
- identifier new_resource.module_name
- mod_name new_resource.so_filename
- notifies :restart, 'service[apache2]'
- end
-end
diff --git a/lc-gdn-chef/cookbooks/apache2/resources/module.rb b/lc-gdn-chef/cookbooks/apache2/resources/module.rb
deleted file mode 100644
index 5f056dda63f2e9ecf8bc0e545677a4d1055d252c..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/resources/module.rb
+++ /dev/null
@@ -1,71 +0,0 @@
-unified_mode true
-
-property :mod_name, String,
- default: lazy { "mod_#{name}.so" },
- description: 'The full name of the file'
-
-property :path, String,
- default: lazy { "#{apache_libexec_dir}/#{mod_name}" },
- description: ''
-
-property :identifier, String,
- default: lazy { "#{name}_module" },
- description: 'String to identify the module for the `LoadModule` directive'
-
-property :mod_conf, Hash,
- default: {},
- description: 'Hash of properties passed to the matching apache2_mod_* resource or to the config file template'
-
-property :conf, [true, false],
- default: lazy { config_file?(name) },
- description: 'The default is set by the config_file? helper. Override to set whether the module should have a config file'
-
-property :template_cookbook, String,
- default: '',
- description: 'Cookbook to source the config file template from'
-
-property :apache_service_notification, Symbol,
- equal_to: %i( reload restart ),
- default: :reload,
- description: 'Service notification for the apache2 service; accepts reload or restart.'
-
-action :enable do
- # Create apache2_mod_resource if we want it configured
- if new_resource.conf
- # manage the template directly when it comes from an external cookbook, since there is no mod_ resource for it
- if !new_resource.template_cookbook.empty?
- template ::File.join(apache_dir, 'mods-available', "#{new_resource.name}.conf") do
- source "mods/#{new_resource.name}.conf.erb"
- cookbook new_resource.template_cookbook
- variables new_resource.mod_conf
- end
- else
- declare_resource("apache2_mod_#{new_resource.name}".to_sym, 'default') do
- new_resource.mod_conf.each { |k, v| send(k, v) }
- end
- end
- end
-
- file ::File.join(apache_dir, 'mods-available', "#{new_resource.name}.load") do
- content "LoadModule #{new_resource.identifier} #{new_resource.path}\n"
- mode '0644'
- end
-
- execute "a2enmod #{new_resource.name}" do
- command "/usr/sbin/a2enmod #{new_resource.name}"
- notifies new_resource.apache_service_notification, 'service[apache2]', :delayed
- not_if { mod_enabled?(new_resource) }
- end
-end
-
-action :disable do
- execute "a2dismod #{new_resource.name}" do
- command "/usr/sbin/a2dismod #{new_resource.name}"
- notifies new_resource.apache_service_notification, 'service[apache2]', :delayed
- only_if { mod_enabled?(new_resource) }
- end
-end
-
-action_class do
- include Apache2::Cookbook::Helpers
-end
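A sketch of the generic module resource in a wrapper recipe: enabling a load-only module, forcing a restart instead of a reload, and disabling a module. It assumes the Debian-style a2enmod/a2dismod layout the resource shells out to, plus a `service[apache2]` resource declared elsewhere.

```ruby
# Hypothetical wrapper recipe.
apache2_module 'rewrite' # ships only a .load file, so no conf template is rendered

apache2_module 'headers' do
  apache_service_notification :restart # restart rather than reload on change
end

apache2_module 'autoindex' do
  action :disable
end
```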
diff --git a/lc-gdn-chef/cookbooks/apache2/resources/site.rb b/lc-gdn-chef/cookbooks/apache2/resources/site.rb
deleted file mode 100644
index eae464ab20da4eaeb7db509d646d01e041c39f6a..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/resources/site.rb
+++ /dev/null
@@ -1,26 +0,0 @@
-unified_mode true
-
-property :site_name, String,
- name_property: true,
- description: 'Name of the site to enable/disable'
-
-action :enable do
- execute "a2ensite #{new_resource.site_name}" do
- command "/usr/sbin/a2ensite #{new_resource.site_name}"
- notifies :reload, 'service[apache2]', :delayed
- not_if { apache_site_enabled?(new_resource.site_name) }
- only_if { apache_site_available?(new_resource.site_name) }
- end
-end
-
-action :disable do
- execute "a2dissite #{new_resource.site_name}" do
- command "/usr/sbin/a2dissite #{new_resource.site_name}"
- notifies :reload, 'service[apache2]', :delayed
- only_if { apache_site_enabled?(new_resource.site_name) }
- end
-end
-
-action_class do
- include Apache2::Cookbook::Helpers
-end
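A sketch of enabling and disabling vhosts with this resource; 'my-app' stands in for a site already written to sites-available, and `service[apache2]` is assumed to exist for the reload notifications.

```ruby
# Hypothetical wrapper recipe.
apache2_site 'my-app'

apache2_site '000-default' do
  action :disable
end
```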
diff --git a/lc-gdn-chef/cookbooks/apache2/templates/a2enmod.erb b/lc-gdn-chef/cookbooks/apache2/templates/a2enmod.erb
deleted file mode 100644
index 622e5ec2c53f4d8339a5e691a6d21e11b85e6f10..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/templates/a2enmod.erb
+++ /dev/null
@@ -1,541 +0,0 @@
-#!<%= node['platform_family'] == 'freebsd' ? '/usr/local/bin/perl' : '/usr/bin/perl'%> -w
-#
-# a2enmod by Stefan Fritsch
-# Licensed under Apache License 2.0
-#
-# The coding style is "perltidy -pbp"
-
-use strict;
-use Cwd 'realpath';
-use File::Spec;
-use File::Basename;
-use File::Path;
-use Getopt::Long;
-
-my $quiet;
-my $force;
-my $maintmode;
-my $purge;
-my $delete;
-
-Getopt::Long::Configure('bundling');
-GetOptions(
- 'quiet|q' => \$quiet,
- 'force|f' => \$force,
- 'maintmode|m' => \$maintmode,
- 'purge|p' => \$purge,
- 'delete|d' => \$delete
-) or exit 2;
-
-my $basename = basename($0);
-$basename =~ /^a2(en|dis)(mod|site|conf)((?:-.+)?)$/
- or die "$basename call name unknown\n";
-my $act = $1;
-my $obj = $2;
-my $dir_suffix = $3;
-
-my $env_file = $ENV{APACHE_ENVVARS}
- || (
- $ENV{APACHE_CONFDIR}
- ? "$ENV{APACHE_CONFDIR}/envvars"
- : "<%= @apache_dir %>$dir_suffix/envvars"
- );
-$ENV{LANG} = 'C';
-read_env_file($env_file);
-
-$act .= 'able';
-my ( $name, $dir, $sffx, $reload );
-if ( $obj eq 'mod' ) {
- $obj = 'module';
- $dir = 'mods';
- $sffx = '.load';
- $reload = 'restart';
-}
-elsif ( $obj eq 'conf' ) {
- $obj = 'conf';
- $dir = 'conf';
- $sffx = '.conf';
- $reload = 'reload';
-}
-else {
- $dir = 'sites';
- $sffx = '.conf';
- $reload = 'reload';
-}
-$name = ucfirst($obj);
-
-my $confdir = $ENV{APACHE_CONFDIR} || "<%= @apache_dir %>$dir_suffix";
-my $availdir = $ENV{ uc("APACHE_${dir}_AVAILABLE") }
- || "$confdir/$dir-available";
-my $enabldir = $ENV{ uc("APACHE_${dir}_ENABLED") } || "$confdir/$dir-enabled";
-my $statedir = $ENV{ uc("APACHE_STATE_DIRECTORY") } || "<%= @lib_dir %>";
-
-$statedir .= "/$obj";
-
-my $choicedir = $act eq 'enable' ? $availdir : $enabldir;
-my $linkdir = File::Spec->abs2rel( $availdir, $enabldir );
-
-my $request_reload = 0;
-
-my $rc = 0;
-
-# Fix backwards compatibility for SUSE
-if ( -e "/usr/bin/zypper" ) {
- if ( $quiet || $delete ) {
- exit 0
- }
-}
-
-if ( !scalar @ARGV ) {
- my @choices = myglob('*');
- print "Your choices are: @choices\n";
- print "Which ${obj}(s) do you want to $act (wildcards ok)?\n";
- my $input = <>;
- @ARGV = split /\s+/, $input;
-
-}
-
-my @objs;
-foreach my $arg (@ARGV) {
- $arg =~ s/${sffx}$//;
- my @glob = myglob($arg);
- if ( !@glob ) {
- error("No $obj found matching $arg!\n");
- $rc = 1;
- }
- else {
- push @objs, @glob;
- }
-}
-
-foreach my $acton (@objs) {
- doit($acton) or $rc = 1;
-}
-
-info(
- "To activate the new configuration, you need to run:\n service apache2 $reload\n"
-) if $request_reload;
-
-exit($rc);
-
-##############################################################################
-
-sub myglob {
- my $arg = shift;
-
- my @glob = map {
- s{^$choicedir/}{};
- s{$sffx$}{};
- $_
- } glob("$choicedir/$arg$sffx");
-
- return @glob;
-}
-
-sub doit {
- my $acton = shift;
-
- my ( $conftgt, $conflink );
- if ( $obj eq 'module' ) {
- if ( $acton eq 'cgi' && threaded() ) {
- print
- "Your MPM seems to be threaded. Selecting cgid instead of cgi.\n";
- $acton = 'cgid';
- }
-
- $conftgt = "$availdir/$acton.conf";
- if ( -e $conftgt ) {
- $conflink = "$enabldir/$acton.conf";
- }
- }
-
- my $tgt = "$availdir/$acton$sffx";
- my $link = "$enabldir/$acton$sffx";
-
- if ( !-e $tgt ) {
- if ( -l $link && !-e $link ) {
- if ( $act eq 'disable' ) {
- info("removing dangling symlink $link\n");
- unlink($link);
-
- # force a .conf path. It may exist as dangling link, too
- $conflink = "$enabldir/$acton.conf";
-
- if ( -l $conflink && !-e $conflink ) {
- info("removing dangling symlink $conflink\n");
- unlink($conflink);
- }
-
- return 1;
- }
- else {
- error("$link is a dangling symlink!\n");
- }
- }
-
- if ( $purge ) {
- switch_marker( $obj, $act, $acton );
- # exit silently, we are purging anyway
- return 1;
- }
-
- error("$name $acton does not exist!\n");
- return 0;
- }
-
- # handle module dependencies
- if ( $obj eq 'module' ) {
- if ( $act eq 'enable' ) {
- if ( $acton eq 'mpm_itk' ) {
- warning( "MPM_ITK is a third party module that is not part "
- . "of the official Apache HTTPD. It has seen less "
- . "testing than the official MPM modules." );
- }
- my @depends = get_deps("$availdir/$acton.load");
- do_deps( $acton, @depends ) or return 0;
-
- my @conflicts = get_deps( "$availdir/$acton.load", "Conflicts" );
- check_conflicts( $acton, @conflicts ) or return 0;
- }
- else {
- my @depending;
- foreach my $d ( glob("$enabldir/*.load") ) {
- my @deps = get_deps($d);
- if ( is_in( $acton, @deps ) ) {
- $d =~ m,/([^/]+).load$,;
- push @depending, $1;
- }
- }
- if ( scalar @depending ) {
- if ($force) {
- do_deps( $acton, @depending ) or return 0;
- }
- else {
- error(
- "The following modules depend on $acton ",
- "and need to be disabled first: @depending\n"
- );
- return 0;
- }
- }
- }
- }
- elsif ( $act eq 'enable' ) {
- my @depends = get_deps("$availdir/$acton$sffx");
- warn_deps( $acton, @depends ) or return 0;
- }
-
- if ( $act eq 'enable' ) {
- my $check = check_link( $tgt, $link );
- if ( $check eq 'ok' ) {
- if ($conflink) {
-
- # handle .conf file
- my $confcheck = check_link( $conftgt, $conflink );
- if ( $confcheck eq 'ok' ) {
- info("$name $acton already enabled\n");
- return 1;
- }
- elsif ( $confcheck eq 'missing' ) {
- print "Enabling config file $acton.conf.\n";
- add_link( $conftgt, $conflink ) or return 0;
- }
- else {
- error(
- "Config file $acton.conf not properly enabled: $confcheck\n"
- );
- return 0;
- }
- }
- else {
- info("$name $acton already enabled\n");
- return 1;
- }
- }
- elsif ( $check eq 'missing' ) {
- if ($conflink) {
-
- # handle .conf file
- my $confcheck = check_link( $conftgt, $conflink );
- if ( $confcheck eq 'missing' ) {
- add_link( $conftgt, $conflink ) or return 0;
- }
- elsif ( $confcheck ne 'ok' ) {
- error(
- "Config file $acton.conf not properly enabled: $confcheck\n"
- );
- return 0;
- }
- }
-
- print "Enabling $obj $acton.\n";
- if ( $acton eq 'ssl' ) {
- info( "See /usr/share/doc/apache2/README.Debian.gz on "
- . "how to configure SSL and create self-signed certificates.\n"
- );
- }
- return add_link( $tgt, $link )
- && switch_marker( $obj, $act, $acton );
- }
- else {
- error("$name $acton not properly enabled: $check\n");
- return 0;
- }
- }
- else {
- if ( -e $link || -l $link ) {
- remove_link($link);
- if ( $conflink && -e $conflink ) {
- remove_link($conflink);
- }
- switch_marker( $obj, $act, $acton );
- print "$name $acton disabled.\n";
- }
- elsif ( $conflink && -e $conflink ) {
- print "Disabling stale config file $acton.conf.\n";
- remove_link($conflink);
- }
- else {
- info("$name $acton already disabled\n");
- if ( $purge ) {
- switch_marker( $obj, $act, $acton );
- }
- return 1;
- }
- }
-
- return 1;
-}
-
-sub get_deps {
- my $file = shift;
- my $type = shift || "Depends";
-
- my $fd;
- if ( !open( $fd, '<', $file ) ) {
- error("Can't open $file: $!");
- return;
- }
- my $line;
- while ( defined( $line = <$fd> ) ) {
- chomp $line;
- if ( $line =~ /^# $type:\s+(.*?)\s*$/ ) {
- my $deps = $1;
- return split( /[\n\s]+/, $deps );
- }
-
- # only check until the first non-empty non-comment line
- last if ( $line !~ /^\s*(?:#.*)?$/ );
- }
- return;
-}
-
-sub do_deps {
- my $acton = shift;
- foreach my $d (@_) {
- info("Considering dependency $d for $acton:\n");
- if ( !doit($d) ) {
- error("Could not $act dependency $d for $acton, aborting\n");
- return 0;
- }
- }
- return 1;
-}
-
-sub warn_deps {
- my $acton = shift;
- my $modsenabldir = $ENV{APACHE_MODS_ENABLED} || "$confdir/mods-enabled";
- foreach my $d (@_) {
- info("Checking dependency $d for $acton:\n");
- if ( !-e "$modsenabldir/$d.load" ) {
- warning(
- "Module $d is not enabled, but $acton depends on it, aborting\n"
- );
- return 0;
- }
- }
- return 1;
-}
-
-sub check_conflicts {
- my $acton = shift;
- my $haderror = 0;
- foreach my $d (@_) {
- info("Considering conflict $d for $acton:\n");
-
- my $tgt = "$availdir/$d$sffx";
- my $link = "$enabldir/$d$sffx";
-
- my $confcheck = check_link( $tgt, $link );
- if ( $confcheck eq 'ok' ) {
- error(
- "Module $d is enabled - cannot proceed due to conflicts. It needs to be disabled first!\n"
- );
-
- # Don't return immediately, there could be several conflicts
- $haderror++;
- }
- }
-
- if ($haderror) {
- return 0;
- }
-
- return 1;
-}
-
-sub add_link {
- my ( $tgt, $link ) = @_;
-
- # create relative link
- if ( !symlink( File::Spec->abs2rel( $tgt, dirname($link) ), $link ) ) {
- die("Could not create $link: $!\n");
- }
- $request_reload = 1;
- return 1;
-}
-
-sub check_link {
- my ( $tgt, $link ) = @_;
-
- if ( !-e $link ) {
- if ( -l $link ) {
-
- # points to nowhere
- info("Removing dangling link $link");
- unlink($link) or die "Could not remove $link\n";
- }
- return 'missing';
- }
-
- if ( -e $link && !-l $link ) {
- return "$link is a real file, not touching it";
- }
- if ( realpath($link) ne realpath($tgt) ) {
- return "$link exists but does not point to $tgt, not touching it";
- }
- return 'ok';
-}
-
-sub remove_link {
- my ($link) = @_;
-
- if ( -l $link ) {
- unlink($link) or die "Could not remove $link: $!\n";
- }
- elsif ( -e $link ) {
- error("$link is not a symbolic link, not deleting\n");
- return 0;
- }
- $request_reload = 1;
- return 1;
-}
-
-sub threaded {
- my $result = "";
- $result = qx{<%= @apachectl %> -V | grep 'threaded'}
- if -x '<%= @apachectl %>';
- if ( $? != 0 ) {
-
- # config doesn't work
- if ( -e "$enabldir/mpm_prefork.load" || -e "$enabldir/mpm_itk.load" )
- {
- return 0;
- }
- elsif (-e "$enabldir/mpm_worker.load"
- || -e "$enabldir/mpm_event.load" )
- {
- return 1;
- }
- else {
- error("Can't determine enabled MPM");
-
- # do what user requested
- return 0;
- }
- }
- if ( $result =~ / no/ ) {
- return 0;
- }
- elsif ( $result =~ / yes/ ) {
- return 1;
- }
- else {
- die("Can't parse output from apache2ctl -V:\n$result\n");
- }
-}
-
-sub info {
- print @_ if !$quiet;
-}
-
-sub error {
- print STDERR 'ERROR: ', @_;
-}
-
-sub warning {
- print STDERR 'WARNING: ', @_;
-}
-
-sub is_in {
- my $needle = shift;
- foreach my $e (@_) {
- return 1 if $needle eq $e;
- }
- return 0;
-}
-
-sub read_env_file {
- my $file = shift;
-
- -r $file or return;
- my @lines = qx{env - sh -c '. $file && env'};
- if ($?) {
- die "Could not read $file\n";
- }
-
- foreach my $l (@lines) {
- chomp $l;
- $l =~ /^(.*)?=(.*)$/ or die "Could not parse $file\n";
- $ENV{$1} = $2;
- }
-}
-
-sub switch_marker {
- die('usage: switch_marker([module|site|conf], [enable|disable], $name)')
- if @_ != 3;
- my $which = shift;
- my $what = shift;
- my $name = shift;
-
- my $mode = "admin";
- $mode = "maint" if $maintmode;
-
- #print("switch_marker $which $what $name\n");
- # TODO: get rid of the magic string(s)
- my $state_marker_dir = "$statedir/$what" . "d" . "_by_$mode";
- my $state_marker = "$state_marker_dir/$name";
- if ( !-d $state_marker_dir ) {
- File::Path::mkpath("$state_marker_dir")
- || error(
- "Failed to create marker directory: '$state_marker_dir'\n");
- }
-
- # XXX: swap find with perl alternative
- my @markers = qx{find "$statedir" -type f -a -name "$name"};
- chomp(@markers);
- foreach (@markers) {
- unless ( unlink $_ ) {
- error("Failed to remove old marker '$_'!\n") && return 0;
- }
- }
- unless ($purge) {
- qx{touch "$state_marker"};
- if ( $? != 0 ) {
- error("Failed to create marker '$state_marker'!\n") && return 0;
- }
- return 1;
- }
-}
-
-# vim: syntax=perl sw=4 sts=4 sr et
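The script above dispatches on its own basename (a2enmod, a2dismod, a2ensite, a2dissite, a2enconf, a2disconf), so it only needs to be rendered once and symlinked under the other names. A sketch of how a recipe might do that follows; the destination paths and the three template variables' values are assumed Debian-style defaults, not taken from the cookbook's own install code.

```ruby
# Hypothetical rendering of templates/a2enmod.erb; paths are assumptions.
template '/usr/sbin/a2enmod' do
  source 'a2enmod.erb'
  cookbook 'apache2'
  owner 'root'
  mode '0755'
  variables(
    apache_dir: '/etc/apache2',      # becomes <%= @apache_dir %>
    lib_dir: '/var/lib/apache2',     # becomes <%= @lib_dir %>
    apachectl: '/usr/sbin/apachectl' # becomes <%= @apachectl %>
  )
end

# The companion commands are symlinks to the same script.
%w(a2dismod a2ensite a2dissite a2enconf a2disconf).each do |cmd|
  link "/usr/sbin/#{cmd}" do
    to '/usr/sbin/a2enmod'
  end
end
```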
diff --git a/lc-gdn-chef/cookbooks/apache2/templates/apache2.conf.erb b/lc-gdn-chef/cookbooks/apache2/templates/apache2.conf.erb
deleted file mode 100644
index dac9ea68de547bf6a0a974cc4704462375e68bca..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/templates/apache2.conf.erb
+++ /dev/null
@@ -1,171 +0,0 @@
-# This file was generated by Chef for <%= node['fqdn'] %>.
-#
-# Based on the Ubuntu 18.04 apache2.conf
-#
-ServerRoot "<%= @apache_dir %>"
-
-#
-# Hostname and port that the server uses to identify itself
-#
-<% unless @server_name.nil? %>
-ServerName <%= @server_name %>
-<% end %>
-
-#
-# The accept serialization lock file MUST BE STORED ON A LOCAL DISK.
-#
-Mutex file:<%= @lock_dir %> default
-
-#
-# The directory where shm and other runtime files will be stored.
-#
-
-DefaultRuntimeDir <%= @run_dir %>
-
-#
-# PidFile: The file in which the server should record its process
-# identification number when it starts.
-#
-PidFile <%= @pid_file %>
-
-#
-# Timeout: The number of seconds before receives and sends time out.
-#
-Timeout <%= @timeout %>
-
-#
-# KeepAlive: Whether or not to allow persistent connections (more than
-# one request per connection). Set to "Off" to deactivate.
-#
-KeepAlive <%= @keep_alive %>
-
-#
-# MaxKeepAliveRequests: The maximum number of requests to allow
-# during a persistent connection. Set to 0 to allow an unlimited amount.
-# We recommend you leave this number high, for maximum performance.
-#
-MaxKeepAliveRequests <%= @max_keep_alive_requests %>
-
-#
-# KeepAliveTimeout: Number of seconds to wait for the next request from the
-# same client on the same connection.
-#
-KeepAliveTimeout <%= @keep_alive_timeout %>
-
-# These are set in /etc/apache2/envvars
-User <%= @apache_user %>
-Group <%= @apache_group %>
-
-#
-# HostnameLookups: Log the names of clients or just their IP addresses
-# e.g., www.apache.org (on) or 204.62.129.132 (off).
-# The default is off because it'd be overall better for the net if people
-# had to knowingly turn this feature on, since enabling it means that
-# each client request will result in AT LEAST one lookup request to the
-# nameserver.
-#
-HostnameLookups Off
-
-# ErrorLog: The location of the error log file.
-# If you do not specify an ErrorLog directive within a
-# container, error messages relating to that virtual host will be
-# logged here. If you *do* define an error logfile for a
-# container, that host's errors will be logged there and not here.
-#
-ErrorLog <%= @log_dir %>/error.log
-
-#
-# LogLevel: Control the severity of messages logged to the error_log.
-# Available values: trace8, ..., trace1, debug, info, notice, warn,
-# error, crit, alert, emerg.
-# It is also possible to configure the log level for particular modules, e.g.
-# "LogLevel info ssl:warn"
-#
-LogLevel <%= @log_level %>
-
-# Include module configuration:
-IncludeOptional <%= @apache_dir %>/mods-enabled/*.load
-IncludeOptional <%= @apache_dir %>/mods-enabled/*.conf
-
-# Include list of ports to listen on
-Include ports.conf
-
-
-# Sets the default security model of the Apache2 HTTPD server. It does
-# not allow access to the root filesystem outside of /usr/share and /var/www.
-# The former is used by web applications packaged in Debian,
-# the latter may be used for local directories served by the web server. If
-# your system is serving content from a sub-directory in /srv you must allow
-# access here, or in any related virtual host.
-
- Options FollowSymLinks
- AllowOverride None
- Require all denied
-
-
-
- AllowOverride None
- Require all granted
-
-
->
- Options Indexes FollowSymLinks
- AllowOverride None
- Require all granted
-</Directory>
-
-#<Directory /srv/>
-# Options Indexes FollowSymLinks
-# AllowOverride None
-# Require all granted
-#</Directory>
-
-# AccessFileName: The name of the file to look for in each directory
-# for additional configuration directives. See also the AllowOverride
-# directive.
-#
-AccessFileName <%= @access_file_name %>
-
-#
-# The following lines prevent .htaccess and .htpasswd files from being
-# viewed by Web clients.
-#
-<FilesMatch "^\.ht">
- Require all denied
-</FilesMatch>
-
-<% if node['platform_family'] =~ /freebsd/ -%>
-
- AcceptFilter http none
- AcceptFilter https none
-
-<% end %>
-
-#
-# The following directives define some format nicknames for use with
-# a CustomLog directive.
-#
-# These deviate from the Common Log Format definitions in that they use %O
-# (the actual bytes sent including headers) instead of %b (the size of the
-# requested file), because the latter makes it impossible to detect partial
-# requests.
-#
-# Note that the use of %{X-Forwarded-For}i instead of %h is not recommended.
-# Use mod_remoteip instead.
-#
-LogFormat "%v:%p %h %l %u %t \"%r\" %>s %O \"%{Referer}i\" \"%{User-Agent}i\"" vhost_combined
-LogFormat "%h %l %u %t \"%r\" %>s %O \"%{Referer}i\" \"%{User-Agent}i\"" combined
-LogFormat "%h %l %u %t \"%r\" %>s %O" common
-LogFormat "%{Referer}i -> %U" referer
-LogFormat "%{User-agent}i" agent
-
-# Include of directories ignores editors' and dpkg's backup files,
-# see README.Debian for details.
-
-# Include generic snippets of statements
-IncludeOptional conf-enabled/*.conf
-
-# Include the virtual host configurations:
-IncludeOptional sites-enabled/*.conf
-
-# vim: syntax=apache ts=4 sw=4 sts=4 sr noet
diff --git a/lc-gdn-chef/cookbooks/apache2/templates/charset.conf.erb b/lc-gdn-chef/cookbooks/apache2/templates/charset.conf.erb
deleted file mode 100644
index 65ab604cf141b46caf5cf0c014c24a54a1fafc69..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/templates/charset.conf.erb
+++ /dev/null
@@ -1,9 +0,0 @@
-# Read the documentation before enabling AddDefaultCharset.
-# In general, it is only a good idea if you know that all your files
-# have this encoding. It will override any encoding given in the files
-# in meta http-equiv or xml encoding tags.
-<% unless @default_charset.nil? %>
-<% @default_charset.each do |value| -%>
-AddDefaultCharset <%= value %>
-<% end -%>
-<% end -%>
diff --git a/lc-gdn-chef/cookbooks/apache2/templates/default-site.conf.erb b/lc-gdn-chef/cookbooks/apache2/templates/default-site.conf.erb
deleted file mode 100644
index f2f0bc90a4f073f20ccf205d42f59eb58ee1fa38..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/templates/default-site.conf.erb
+++ /dev/null
@@ -1,31 +0,0 @@
-<VirtualHost *:80>
- # The ServerName directive sets the request scheme, hostname and port that
- # the server uses to identify itself. This is used when creating
- # redirection URLs. In the context of virtual hosts, the ServerName
- # specifies what hostname must appear in the request's Host: header to
- # match this virtual host. For the default virtual host (this file) this
- # value is not decisive as it is used as a last resort host regardless.
- # However, you must set it for any further virtual host explicitly.
- #ServerName www.example.com
-
- ServerAdmin webmaster@localhost
- DocumentRoot <%= @docroot_dir %>
-
- # Available loglevels: trace8, ..., trace1, debug, info, notice, warn,
- # error, crit, alert, emerg.
- # It is also possible to configure the loglevel for particular
- # modules, e.g.
- #LogLevel info ssl:warn
-
- ErrorLog <%= @log_dir %>/error.log
- CustomLog <%= @log_dir %>/access.log combined
-
- # For most configuration files from conf-available/, which are
- # enabled or disabled at a global level, it is possible to
- # include a line for only one particular virtual host. For example the
- # following line enables the CGI configuration for this host only
- # after it has been globally disabled with "a2disconf".
- #Include conf-available/serve-cgi-bin.conf
-</VirtualHost>
-
-# vim: syntax=apache ts=4 sw=4 sts=4 sr noet
diff --git a/lc-gdn-chef/cookbooks/apache2/templates/envvars.erb b/lc-gdn-chef/cookbooks/apache2/templates/envvars.erb
deleted file mode 100644
index c2ce2d27e2dad830881a71620e9e808cc1f157e0..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/templates/envvars.erb
+++ /dev/null
@@ -1,51 +0,0 @@
-# envvars - default environment variables for apache2ctl
-
-# this won't be correct after changing uid
-unset HOME
-
-# Since there is no sane way to get the parsed apache2 config in scripts, some
-# settings are defined via environment variables and then used in apache2ctl,
-# /etc/init.d/apache2, /etc/logrotate.d/apache2, etc.
-export APACHE_RUN_USER=<%= @apache_user %>
-export APACHE_RUN_GROUP=<%= @apache_group %>
-# temporary state file location. This might be changed to /run in Wheezy+1
-export APACHE_PID_FILE=<%= @pid_file %>
-export APACHE_RUN_DIR=<%= @run_dir %>
-export APACHE_LOCK_DIR=<%= @lock_dir %>
-export APACHE_LOG_DIR=<%= @log_dir %>
-
-## The locale used by some modules like mod_dav
-<%- if @apache_locale != 'system' %>
-export LANG=<%= @apache_locale %>
-export LC_ALL=<%= @apache_locale %>
-<%- else %>
-## Use the system default locale:
-. /etc/default/locale
-export LANG
-<%- end %>
-
-
-## The command to get the status for 'apache2ctl status'.
-## Some packages providing 'www-browser' need '--dump' instead of '-dump'.
-#export APACHE_LYNX='www-browser -dump'
-
-## If you need a higher file descriptor limit, uncomment and adjust the
-## following line (default is 8192):
-#APACHE_ULIMIT_MAX_FILES='ulimit -n 65536'
-
-## If you would like to pass arguments to the web server, add them below
-## to the APACHE_ARGUMENTS environment.
-#export APACHE_ARGUMENTS=''
-
-## Enable the debug mode for maintainer scripts.
-## This will produce a verbose output on package installations of web server modules and web application
-## installations which interact with Apache
-#export APACHE2_MAINTSCRIPT_DEBUG=1
-
-APACHE_STATUSURL=<%= @status_url %>
-
-<% unless @envvars_additional_params.nil? %>
-<% @envvars_additional_params.each do |k,v| %>
-<%= "#{k}=#{v}" %>
-<% end -%>
-<% end -%>
diff --git a/lc-gdn-chef/cookbooks/apache2/templates/etc-sysconfig-httpd.erb b/lc-gdn-chef/cookbooks/apache2/templates/etc-sysconfig-httpd.erb
deleted file mode 100644
index 759418b371a8e1e42a31d86706fef97e760204db..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/templates/etc-sysconfig-httpd.erb
+++ /dev/null
@@ -1,37 +0,0 @@
-# This file is managed by Chef. Changes will be overwritten.
-
-#
-# The default processing model (MPM) is the process-based
-# 'prefork' model. A thread-based model, 'worker', is also
-# available, but does not work with some modules (such as PHP).
-# The service must be stopped before changing this variable.
-#
-HTTPD=<%= @apache_binary %>
-
-#
-# To pass additional options (for instance, -D definitions) to the
-# httpd binary at startup, set OPTIONS here.
-#
-#OPTIONS=
-
-#
-# By default, the httpd process is started in the C locale; to
-# change the locale in which the server runs, the HTTPD_LANG
-# variable can be set.
-#
-HTTPD_LANG=<%= @apache_locale %>
-
-#
-# By default, the httpd process will create the file
-# /var/run/httpd/httpd.pid in which it records its process
-# identification number when it starts. If an alternate location is
-# specified in httpd.conf (via the PidFile directive), the new
-# location needs to be reported in the PIDFILE.
-#
-PIDFILE=<%= @apache_pid_file %>
-
-<% unless @sysconfig_additional_params.nil? %>
-<% @sysconfig_additional_params.each do |k,v| %>
-<%= "#{k}=#{v}" %>
-<% end -%>
-<% end -%>
diff --git a/lc-gdn-chef/cookbooks/apache2/templates/mods/actions.conf.erb b/lc-gdn-chef/cookbooks/apache2/templates/mods/actions.conf.erb
deleted file mode 100644
index e4d448b2f6cc41b70440a534a2cf449993c85f76..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/templates/mods/actions.conf.erb
+++ /dev/null
@@ -1,14 +0,0 @@
-<IfModule mod_actions.c>
- #
- # Action lets you define media types that will execute a script whenever
- # a matching file is called. This eliminates the need for repeated URL
- # pathnames for oft-used CGI file processors.
- # Format: Action media/type /cgi-script/location
- # Format: Action handler-name /cgi-script/location
- #
- <% unless @actions.empty? %>
- <% @actions.each do |handler_name, location| %>
- Action <%= handler_name %> <%= location %>
- <% end -%>
- <% end -%>
-</IfModule>
diff --git a/lc-gdn-chef/cookbooks/apache2/templates/mods/alias.conf.erb b/lc-gdn-chef/cookbooks/apache2/templates/mods/alias.conf.erb
deleted file mode 100644
index bc5594b93e45cd6643dda32ff69b8129c1cd3961..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/templates/mods/alias.conf.erb
+++ /dev/null
@@ -1,22 +0,0 @@
-<IfModule alias_module>
- #
- # Aliases: Add here as many aliases as you need (with no limit). The format is
- # Alias fakename realname
- #
- # Note that if you include a trailing / on fakename then the server will
- # require it to be present in the URL. So "/icons" isn't aliased in this
- # example, only "/icons/". If the fakename is slash-terminated, then the
- # realname must also be slash terminated, and if the fakename omits the
- # trailing slash, the realname must also omit it.
- #
- # We include the /icons/ alias for FancyIndexed directory listings. If
- # you do not use FancyIndexing, you may comment this out.
- #
- Alias /icons/ "<%= @icondir %>/"
-
- ">
- Options <%= @options.join(' ') %>
- AllowOverride <%= @allow_override.join(' ') %>
- Require <%= @require %>
- </Directory>
-</IfModule>
diff --git a/lc-gdn-chef/cookbooks/apache2/templates/mods/auth_cas.conf.erb b/lc-gdn-chef/cookbooks/apache2/templates/mods/auth_cas.conf.erb
deleted file mode 100644
index 17d6b4699eb5ceef7e23864b24666a7d18312968..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/templates/mods/auth_cas.conf.erb
+++ /dev/null
@@ -1,8 +0,0 @@
-CASCookiePath <%= @cache_dir %>/mod_auth_cas/
-CASLoginURL <%= @login_url %>
-CASValidateURL <%= @validate_url %>
-<% unless @directives.nil? -%>
-<% @directives.sort_by { |key, val| key }.each do |directive, value| -%>
-<%= directive %> <%= value %>
-<% end -%>
-<% end -%>
diff --git a/lc-gdn-chef/cookbooks/apache2/templates/mods/auth_cas.load.erb b/lc-gdn-chef/cookbooks/apache2/templates/mods/auth_cas.load.erb
deleted file mode 100644
index c150dbae95a36c2f9678764b275c7fa4843bf0dd..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/templates/mods/auth_cas.load.erb
+++ /dev/null
@@ -1 +0,0 @@
-LoadModule auth_cas_module <%= @libexec_dir %>/mod_auth_cas.so
diff --git a/lc-gdn-chef/cookbooks/apache2/templates/mods/authopenid.load.erb b/lc-gdn-chef/cookbooks/apache2/templates/mods/authopenid.load.erb
deleted file mode 100644
index 5ef7dbe9a440ee6d908984a38fd726a8d2f725b4..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/templates/mods/authopenid.load.erb
+++ /dev/null
@@ -1 +0,0 @@
-LoadModule authopenid_module <%= @libexec_dir %>/mod_auth_openid.so
diff --git a/lc-gdn-chef/cookbooks/apache2/templates/mods/autoindex.conf.erb b/lc-gdn-chef/cookbooks/apache2/templates/mods/autoindex.conf.erb
deleted file mode 100644
index a95aa1cb16761a74d6d815dc5b1bb6fdaf09cfb2..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/templates/mods/autoindex.conf.erb
+++ /dev/null
@@ -1,97 +0,0 @@
-<IfModule mod_autoindex.c>
- #
- # Directives controlling the display of server-generated directory listings.
- #
-
- #
- # IndexOptions: Controls the appearance of server-generated directory
- # listings.
- # Remove/replace the "Charset=UTF-8" if you don't use UTF-8 for your filenames.
- #
- IndexOptions <%= @index_options.join(' ') %>
-
- #
- # AddIcon* directives tell the server which icon to show for different
- # files or filename extensions. These are only displayed for
- # FancyIndexed directories.
- #
- AddIconByEncoding (CMP,/icons/compressed.gif) x-compress x-gzip x-bzip2
-
- AddIconByType (TXT,/icons/text.gif) text/*
- AddIconByType (IMG,/icons/image2.gif) image/*
- AddIconByType (SND,/icons/sound2.gif) audio/*
- AddIconByType (VID,/icons/movie.gif) video/*
-
- AddIcon /icons/binary.gif .bin .exe
- AddIcon /icons/binhex.gif .hqx
- AddIcon /icons/tar.gif .tar
- AddIcon /icons/world2.gif .wrl .wrl.gz .vrml .vrm .iv
- AddIcon /icons/compressed.gif .Z .z .tgz .gz .zip
- AddIcon /icons/a.gif .ps .ai .eps
- AddIcon /icons/layout.gif .html .shtml .htm .pdf
- AddIcon /icons/text.gif .txt
- AddIcon /icons/c.gif .c
- AddIcon /icons/p.gif .pl .py
- AddIcon /icons/f.gif .for
- AddIcon /icons/dvi.gif .dvi
- AddIcon /icons/uuencoded.gif .uu
- AddIcon /icons/script.gif .conf .sh .shar .csh .ksh .tcl
- AddIcon /icons/tex.gif .tex
- # It's a suffix rule, so simply matching "core" matches "score" as well !
- AddIcon /icons/bomb.gif /core
- AddIcon (SND,/icons/sound2.gif) .ogg
- AddIcon (VID,/icons/movie.gif) .ogm
-
- AddIcon /icons/back.gif ..
- AddIcon /icons/hand.right.gif README
- AddIcon /icons/folder.gif ^^DIRECTORY^^
- AddIcon /icons/blank.gif ^^BLANKICON^^
-
- # Default icons for OpenDocument format
- AddIcon /icons/odf6odt-20x22.png .odt
- AddIcon /icons/odf6ods-20x22.png .ods
- AddIcon /icons/odf6odp-20x22.png .odp
- AddIcon /icons/odf6odg-20x22.png .odg
- AddIcon /icons/odf6odc-20x22.png .odc
- AddIcon /icons/odf6odf-20x22.png .odf
- AddIcon /icons/odf6odb-20x22.png .odb
- AddIcon /icons/odf6odi-20x22.png .odi
- AddIcon /icons/odf6odm-20x22.png .odm
-
- AddIcon /icons/odf6ott-20x22.png .ott
- AddIcon /icons/odf6ots-20x22.png .ots
- AddIcon /icons/odf6otp-20x22.png .otp
- AddIcon /icons/odf6otg-20x22.png .otg
- AddIcon /icons/odf6otc-20x22.png .otc
- AddIcon /icons/odf6otf-20x22.png .otf
- AddIcon /icons/odf6oti-20x22.png .oti
- AddIcon /icons/odf6oth-20x22.png .oth
-
- #
- # DefaultIcon is which icon to show for files which do not have an icon
- # explicitly set.
- #
- DefaultIcon /icons/unknown.gif
-
- #
- # AddDescription allows you to place a short description after a file in
- # server-generated indexes. These are only displayed for FancyIndexed
- # directories.
- # Format: AddDescription "description" filename
- #
-
- #
- # ReadmeName is the name of the README file the server will look for by
- # default, and append to directory listings.
- #
- # HeaderName is the name of a file which should be prepended to
- # directory indexes.
- ReadmeName <%= @readme_name %>
- HeaderName <%= @header_name %>
-
- #
- # IndexIgnore is a set of filenames which directory indexing should ignore
- # and not include in the listing. Shell-style wildcarding is permitted.
- #
- IndexIgnore <%= @index_ignore %>
-</IfModule>
diff --git a/lc-gdn-chef/cookbooks/apache2/templates/mods/cache_disk.conf.erb b/lc-gdn-chef/cookbooks/apache2/templates/mods/cache_disk.conf.erb
deleted file mode 100644
index 95e68513bdd1cbb68f003d48e60f78352d854d6d..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/templates/mods/cache_disk.conf.erb
+++ /dev/null
@@ -1,5 +0,0 @@
-<IfModule mod_cache_disk.c>
- CacheRoot <%= @cache_root %>
- CacheDirLevels <%= @cache_dir_levels %>
- CacheDirLength <%= @cache_dir_length %>
-</IfModule>
diff --git a/lc-gdn-chef/cookbooks/apache2/templates/mods/cgid.conf.erb b/lc-gdn-chef/cookbooks/apache2/templates/mods/cgid.conf.erb
deleted file mode 100644
index d321b9b7b0e2c7b3e376afb45b5d0478e7156a2a..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/templates/mods/cgid.conf.erb
+++ /dev/null
@@ -1 +0,0 @@
-ScriptSock <%= @script_sock %>
diff --git a/lc-gdn-chef/cookbooks/apache2/templates/mods/dav_fs.conf.erb b/lc-gdn-chef/cookbooks/apache2/templates/mods/dav_fs.conf.erb
deleted file mode 100644
index 5dcf4cdf238c8bec6eb09fae10aa132a5b854b5a..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/templates/mods/dav_fs.conf.erb
+++ /dev/null
@@ -1 +0,0 @@
-DAVLockDB <%= @dav_lock_db %>
diff --git a/lc-gdn-chef/cookbooks/apache2/templates/mods/deflate.conf.erb b/lc-gdn-chef/cookbooks/apache2/templates/mods/deflate.conf.erb
deleted file mode 100644
index bf74016795e81879e01d695f7a32c0eb87907c5e..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/templates/mods/deflate.conf.erb
+++ /dev/null
@@ -1,9 +0,0 @@
-<IfModule mod_deflate.c>
- <IfModule mod_filter.c>
- <% unless @add_output_filter_by_type.empty? %>
- <% @add_output_filter_by_type.each_value do |filter| %>
- AddOutputFilterByType <%= filter %>
- <% end -%>
- <% end -%>
- </IfModule>
-</IfModule>
diff --git a/lc-gdn-chef/cookbooks/apache2/templates/mods/dir.conf.erb b/lc-gdn-chef/cookbooks/apache2/templates/mods/dir.conf.erb
deleted file mode 100644
index 3c5c8f52ffd433e79b10881982acf7816a292e4c..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/templates/mods/dir.conf.erb
+++ /dev/null
@@ -1,3 +0,0 @@
-<IfModule mod_dir.c>
- DirectoryIndex <%= @directory_index.join(' ') %>
-</IfModule>
diff --git a/lc-gdn-chef/cookbooks/apache2/templates/mods/fastcgi.conf.erb b/lc-gdn-chef/cookbooks/apache2/templates/mods/fastcgi.conf.erb
deleted file mode 100644
index eceff33c0a818a6b57e492d82dd9700b3fa7a7f4..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/templates/mods/fastcgi.conf.erb
+++ /dev/null
@@ -1,7 +0,0 @@
-<IfModule mod_fastcgi.c>
- AddHandler fastcgi-script .fcgi
- <% unless @fast_cgi_wrapper.empty? %>
- FastCgiWrapper <%= @fast_cgi_wrapper %>
- <% end -%>
- FastCgiIpcDir <%= @fast_cgi_ipc_dir %>
-</IfModule>
diff --git a/lc-gdn-chef/cookbooks/apache2/templates/mods/fcgid.conf.erb b/lc-gdn-chef/cookbooks/apache2/templates/mods/fcgid.conf.erb
deleted file mode 100644
index 502a8ec1f2d992cd6d3de5e542944a14a2fe398d..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/templates/mods/fcgid.conf.erb
+++ /dev/null
@@ -1,9 +0,0 @@
-<IfModule mod_fcgid.c>
- <% unless @add_handler.empty? %>
- <% @add_handler.each_value do |handler| %>
- AddHandler <%= handler %>
- <% end -%>
- <% end -%>
-
- IPCConnectTimeout <%= @ipc_connect_timeout %>
-</IfModule>
diff --git a/lc-gdn-chef/cookbooks/apache2/templates/mods/include.conf.erb b/lc-gdn-chef/cookbooks/apache2/templates/mods/include.conf.erb
deleted file mode 100644
index 52d6e3aa27c07a863d491eb3ba6b5318d14af832..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/templates/mods/include.conf.erb
+++ /dev/null
@@ -1,13 +0,0 @@
-<IfModule mod_include.c>
- <% unless @add_type.empty? %>
- <% @add_type.each_value do |type| %>
- AddType <%= type %>
- <% end -%>
- <% end -%>
-
- <% unless @add_output_filter.empty? %>
- <% @add_output_filter.each_value do |filter| %>
- AddOutputFilter <%= filter %>
- <% end -%>
- <% end -%>
-</IfModule>
diff --git a/lc-gdn-chef/cookbooks/apache2/templates/mods/info.conf.erb b/lc-gdn-chef/cookbooks/apache2/templates/mods/info.conf.erb
deleted file mode 100644
index 6915e7d88cbac8851a54d65f2f4cebe1edc36800..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/templates/mods/info.conf.erb
+++ /dev/null
@@ -1,7 +0,0 @@
-<IfModule mod_info.c>
- <Location /server-info>
- SetHandler server-info
- Require local
- Require ip <%= @info_allow_list.join(' ') %>
- </Location>
-</IfModule>
diff --git a/lc-gdn-chef/cookbooks/apache2/templates/mods/ldap.conf.erb b/lc-gdn-chef/cookbooks/apache2/templates/mods/ldap.conf.erb
deleted file mode 100644
index 6ec7e10640f365d3033098122c93ea2a6122724e..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/templates/mods/ldap.conf.erb
+++ /dev/null
@@ -1,4 +0,0 @@
->
- SetHandler <%= @set_handler %>
- Require <%= @require %>
-
diff --git a/lc-gdn-chef/cookbooks/apache2/templates/mods/mime.conf.erb b/lc-gdn-chef/cookbooks/apache2/templates/mods/mime.conf.erb
deleted file mode 100644
index 568a709458d9a1d80d98f23eae6f91211785424b..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/templates/mods/mime.conf.erb
+++ /dev/null
@@ -1,109 +0,0 @@
-<IfModule mod_mime.c>
- TypesConfig <%= @types_config %>
-
- AddLanguage ca .ca
- AddLanguage cs .cz .cs
- AddLanguage da .dk
- AddLanguage de .de
- AddLanguage el .el
- AddLanguage en .en
- AddLanguage eo .eo
- AddLanguage es .es
- AddLanguage et .et
- AddLanguage fr .fr
- AddLanguage he .he
- AddLanguage hr .hr
- AddLanguage it .it
- AddLanguage ja .ja
- AddLanguage ko .ko
- AddLanguage ltz .ltz
- AddLanguage nl .nl
- AddLanguage nn .nn
- AddLanguage no .no
- AddLanguage pl .po
- AddLanguage pt .pt
- AddLanguage pt-BR .pt-br
- AddLanguage ru .ru
- AddLanguage sv .sv
- AddLanguage tr .tr
- AddLanguage zh-CN .zh-cn
- AddLanguage zh-TW .zh-tw
-
- #
- # Commonly used filename extensions to character sets. You probably
- # want to avoid clashes with the language extensions, unless you
- # are good at carefully testing your setup after each change.
- # See http://www.iana.org/assignments/character-sets for the
- # official list of charset names and their respective RFCs.
- #
- AddCharset us-ascii .ascii .us-ascii
- AddCharset ISO-8859-1 .iso8859-1 .latin1
- AddCharset ISO-8859-2 .iso8859-2 .latin2 .cen
- AddCharset ISO-8859-3 .iso8859-3 .latin3
- AddCharset ISO-8859-4 .iso8859-4 .latin4
- AddCharset ISO-8859-5 .iso8859-5 .cyr .iso-ru
- AddCharset ISO-8859-6 .iso8859-6 .arb .arabic
- AddCharset ISO-8859-7 .iso8859-7 .grk .greek
- AddCharset ISO-8859-8 .iso8859-8 .heb .hebrew
- AddCharset ISO-8859-9 .iso8859-9 .latin5 .trk
- AddCharset ISO-8859-10 .iso8859-10 .latin6
- AddCharset ISO-8859-13 .iso8859-13
- AddCharset ISO-8859-14 .iso8859-14 .latin8
- AddCharset ISO-8859-15 .iso8859-15 .latin9
- AddCharset ISO-8859-16 .iso8859-16 .latin10
- AddCharset ISO-2022-JP .iso2022-jp .jis
- AddCharset ISO-2022-KR .iso2022-kr .kis
- AddCharset ISO-2022-CN .iso2022-cn .cis
- AddCharset Big5 .Big5 .big5 .b5
- AddCharset cn-Big5 .cn-big5
- AddCharset WINDOWS-1251 .cp-1251 .win-1251
- AddCharset CP866 .cp866
- AddCharset KOI8 .koi8
- AddCharset KOI8-E .koi8-e
- AddCharset KOI8-r .koi8-r .koi8-ru
- AddCharset KOI8-U .koi8-u
- AddCharset KOI8-ru .koi8-uk .ua
- AddCharset ISO-10646-UCS-2 .ucs2
- AddCharset ISO-10646-UCS-4 .ucs4
- AddCharset UTF-7 .utf7
- AddCharset UTF-8 .utf8
- AddCharset UTF-16 .utf16
- AddCharset UTF-16BE .utf16be
- AddCharset UTF-16LE .utf16le
- AddCharset UTF-32 .utf32
- AddCharset UTF-32BE .utf32be
- AddCharset UTF-32LE .utf32le
- AddCharset euc-cn .euc-cn
- AddCharset euc-gb .euc-gb
- AddCharset euc-jp .euc-jp
- AddCharset euc-kr .euc-kr
- AddCharset EUC-TW .euc-tw
- AddCharset gb2312 .gb2312 .gb
- AddCharset iso-10646-ucs-2 .ucs-2 .iso-10646-ucs-2
- AddCharset iso-10646-ucs-4 .ucs-4 .iso-10646-ucs-4
- AddCharset shift_jis .shift_jis .sjis
-
- <% unless @add_encoding.empty? %>
- <% @add_encoding.each_value do |encoding| %>
- AddEncoding <%= encoding %>
- <% end -%>
- <% end -%>
-
- <% unless @add_handler.empty? %>
- <% @add_handler.each_value do |handler| %>
- AddHandler <%= handler %>
- <% end -%>
- <% end -%>
-
- <% unless @add_type.empty? %>
- <% @add_type.each_value do |type| %>
- AddType <%= type %>
- <% end -%>
- <% end -%>
-
- <% unless @add_output_filter.empty? %>
- <% @add_output_filter.each_value do |filter| %>
- AddOutputFilter <%= filter %>
- <% end -%>
- <% end -%>
-</IfModule>
diff --git a/lc-gdn-chef/cookbooks/apache2/templates/mods/mime_magic.conf.erb b/lc-gdn-chef/cookbooks/apache2/templates/mods/mime_magic.conf.erb
deleted file mode 100644
index b8fcacc8ce2807aa8703f375e0663e2bc9535099..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/templates/mods/mime_magic.conf.erb
+++ /dev/null
@@ -1,3 +0,0 @@
-<IfModule mod_mime_magic.c>
- MIMEMagicFile <%= @mime_magic_file %>
-</IfModule>
diff --git a/lc-gdn-chef/cookbooks/apache2/templates/mods/mpm_event.conf.erb b/lc-gdn-chef/cookbooks/apache2/templates/mods/mpm_event.conf.erb
deleted file mode 100644
index 2561f84b7e00ef11fc6083c99767231c1659de82..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/templates/mods/mpm_event.conf.erb
+++ /dev/null
@@ -1,11 +0,0 @@
-# event MPM
-<IfModule mpm_event_module>
- StartServers <%= @startservers %>
- MinSpareThreads <%= @minsparethreads %>
- MaxSpareThreads <%= @maxsparethreads %>
- ThreadsPerChild <%= @threadsperchild %>
- MaxRequestWorkers <%= @maxrequestworkers %>
- MaxConnectionsPerChild <%= @maxconnectionsperchild %>
- ThreadLimit <%= @threadlimit %>
- ServerLimit <%= @serverlimit %>
-</IfModule>
diff --git a/lc-gdn-chef/cookbooks/apache2/templates/mods/mpm_prefork.conf.erb b/lc-gdn-chef/cookbooks/apache2/templates/mods/mpm_prefork.conf.erb
deleted file mode 100644
index ff3951cfaab18516de84a8a5d4bbcefe257ae053..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/templates/mods/mpm_prefork.conf.erb
+++ /dev/null
@@ -1,9 +0,0 @@
-# prefork MPM
-<IfModule mpm_prefork_module>
- StartServers <%= @startservers %>
- MinSpareServers <%= @minspareservers %>
- MaxSpareServers <%= @maxspareservers %>
- ServerLimit <%= @serverlimit %>
- MaxRequestWorkers <%= @maxrequestworkers %>
- MaxConnectionsPerChild <%= @maxconnectionsperchild %>
-</IfModule>
diff --git a/lc-gdn-chef/cookbooks/apache2/templates/mods/mpm_worker.conf.erb b/lc-gdn-chef/cookbooks/apache2/templates/mods/mpm_worker.conf.erb
deleted file mode 100644
index c4c1d21a4e66bc7337d8744c80cca314e076fa6f..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/templates/mods/mpm_worker.conf.erb
+++ /dev/null
@@ -1,11 +0,0 @@
-# worker MPM
-<IfModule mpm_worker_module>
- StartServers <%= @startservers %>
- MinSpareThreads <%= @minsparethreads %>
- MaxSpareThreads <%= @maxsparethreads %>
- ThreadsPerChild <%= @threadsperchild %>
- MaxRequestWorkers <%= @maxrequestworkers %>
- MaxConnectionsPerChild <%= @maxconnectionsperchild %>
- ThreadLimit <%= @threadlimit %>
- ServerLimit <%= @serverlimit %>
-</IfModule>
diff --git a/lc-gdn-chef/cookbooks/apache2/templates/mods/negotiation.conf.erb b/lc-gdn-chef/cookbooks/apache2/templates/mods/negotiation.conf.erb
deleted file mode 100644
index e5258e1097d2faef360e8a9a3b607f070a6950d8..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/templates/mods/negotiation.conf.erb
+++ /dev/null
@@ -1,17 +0,0 @@
-<IfModule mod_negotiation.c>
- #
- # LanguagePriority allows you to give precedence to some languages
- # in case of a tie during content negotiation.
- #
- # Just list the languages in decreasing order of preference. We have
- # more or less alphabetized them here. You probably want to change this.
- #
- LanguagePriority <%= @language_priority %>
-
- #
- # ForceLanguagePriority allows you to serve a result page rather than
- # MULTIPLE CHOICES (Prefer) [in case of a tie] or NOT ACCEPTABLE (Fallback)
- # [in case no accepted languages matched the available variants]
- #
- ForceLanguagePriority <%= @force_language_priority %>
-</IfModule>
diff --git a/lc-gdn-chef/cookbooks/apache2/templates/mods/pagespeed.conf.erb b/lc-gdn-chef/cookbooks/apache2/templates/mods/pagespeed.conf.erb
deleted file mode 100644
index 71d3d52c4e837bba7d43ee427875af42d4cddf80..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/templates/mods/pagespeed.conf.erb
+++ /dev/null
@@ -1,95 +0,0 @@
-
- ModPagespeed <%= @mod_page_speed %>
-
- # We want VHosts to inherit global configuration.
- # If this is not included, they'll be independent (except for inherently
- # global options), at least for backwards compatibility.
- ModPagespeedInheritVHostConfig <%= @inherit_vhost_config %>
-
- <% unless @output_filters.empty? %>
- <% @output_filters.each do |filter| %>
- AddOutputFilterByType <%= filter %>
- <% end -%>
- <% end -%>
- # The ModPagespeedFileCachePath directory must exist and be writable
- # by the apache user (as specified by the User directive).
- ModPagespeedFileCachePath "<%= @file_cache_path %>"
-
- <% unless @rewrite_level.empty? %>
- ModPagespeedRewriteLevel <%= @rewrite_level %>
- <% end -%>
-
- <% unless @disable_filters.empty? %>
- ModPagespeedDisableFilters <%= @disable_filters %>
- <% end -%>
-
- <% unless @enable_filters.empty? %>
- ModPagespeedEnableFilters <%= @enable_filters %>
- <% end -%>
-
- <% unless @domain.empty? %>
- ModPagespeedDomain <%= @domain %>
- <% end -%>
-
- ModPagespeedFileCacheInodeLimit <%= @file_cache_inode_limit %>
- ModPagespeedAvoidRenamingIntrospectiveJavascript <%= @avoid_renaming_introspective_javascript %>
-
- <% unless @library.empty? %>
- <% @library.each do |library| %>
- ModPagespeedLibrary <%= library %>
- <% end -%>
- <% end -%>
-
- # This handles the client-side instrumentation callbacks which are injected
- # by the add_instrumentation filter.
- # You can use a different location by adding the ModPagespeedBeaconUrl
- # directive; see the documentation on add_instrumentation.
-
- SetHandler mod_pagespeed_beacon
-
-
-
- # This page lets you view statistics about the mod_pagespeed module.
-
- Order allow,deny
- # You may insert other "Allow from" lines to add hosts you want to
- # allow to look at generated statistics. Another possibility is
- # to comment out the "Order" and "Allow" options from the config
- # file, to allow any client that can reach your server to examine
- # statistics. This might be appropriate in an experimental setup or
- # if the Apache server is protected by a reverse proxy that will
- # filter URLs in some fashion.
- Allow from localhost
- Allow from 127.0.0.1
- SetHandler mod_pagespeed_statistics
-
-
- # This page lets you view a graphical console displaying statistics about
- # the mod_pagespeed module.
-
- Order allow,deny
- # This can be configured similarly to mod_pagespeed_statistics above.
- Allow from localhost
- Allow from 127.0.0.1
- SetHandler mod_pagespeed_console
-
-
- # Page /mod_pagespeed_message lets you view the latest messages from
- # mod_pagespeed, regardless of log-level in your httpd.conf
- # ModPagespeedMessageBufferSize is the maximum number of bytes you would
- # like to dump to your /mod_pagespeed_message page at one time,
- # its default value is 100k bytes.
- # Set it to 0 if you want to disable this feature.
- ModPagespeedMessageBufferSize 100000
-
-
- Allow from localhost
- Allow from 127.0.0.1
- SetHandler mod_pagespeed_message
-
-
- Allow from localhost
- Allow from 127.0.0.1
- SetHandler mod_pagespeed_referer_statistics
-
-
diff --git a/lc-gdn-chef/cookbooks/apache2/templates/mods/php.conf.erb b/lc-gdn-chef/cookbooks/apache2/templates/mods/php.conf.erb
deleted file mode 100644
index a876e2539afcaaf15b0d6fe2f53bacb9bc862746..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/templates/mods/php.conf.erb
+++ /dev/null
@@ -1,25 +0,0 @@
-
- SetHandler application/x-httpd-php
-
-
- SetHandler application/x-httpd-php-source
- # Deny access to raw php sources by default
- # To re-enable it's recommended to enable access to the files
- # only in specific virtual host or directory
- Require all denied
-
-# Deny access to files without filename (e.g. '.php')
-
- Require all denied
-
-
-# Running PHP scripts in user directories is disabled by default
-#
-# To re-enable PHP in user directories comment the following lines
-# (from <IfModule ...> to </IfModule>.) Do NOT set it to On as it
-# prevents .htaccess files from disabling it.
-<IfModule mod_userdir.c>
- <Directory /home/*/public_html>
- php_admin_value engine Off
- </Directory>
-</IfModule>
diff --git a/lc-gdn-chef/cookbooks/apache2/templates/mods/proxy.conf.erb b/lc-gdn-chef/cookbooks/apache2/templates/mods/proxy.conf.erb
deleted file mode 100644
index cc18fc39f34c8006c060d15ae4895fa3f2c8ebe8..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/templates/mods/proxy.conf.erb
+++ /dev/null
@@ -1,16 +0,0 @@
-
- #turning ProxyRequests on and allowing proxying from all may allow
- #spammers to use your proxy to send email.
-
- ProxyRequests <%= @proxy_requests %>
-
-
- AddDefaultCharset <%= @add_default_charset %>
- Require <%= @require %>
-
-
- # Enable/disable the handling of HTTP/1.1 "Via:" headers.
- # ("Full" adds the server version; "Block" removes all outgoing Via: headers)
- # Set to one of: Off | On | Full | Block
- ProxyVia <%= @proxy_via %>
-
diff --git a/lc-gdn-chef/cookbooks/apache2/templates/mods/proxy_balancer.conf.erb b/lc-gdn-chef/cookbooks/apache2/templates/mods/proxy_balancer.conf.erb
deleted file mode 100644
index 1ab4867a191d890d852c4a071af7ce9375a80682..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/templates/mods/proxy_balancer.conf.erb
+++ /dev/null
@@ -1,10 +0,0 @@
-
- # Balancer manager enables dynamic update of balancer members (needs mod_status)
- #
-
- >
- SetHandler <%= @set_handler %>
- Require <%= @require %>
-
-
-
diff --git a/lc-gdn-chef/cookbooks/apache2/templates/mods/proxy_ftp.conf.erb b/lc-gdn-chef/cookbooks/apache2/templates/mods/proxy_ftp.conf.erb
deleted file mode 100644
index e6bf1e33baca12ceb1a169730c3fffe0808dc3e5..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/templates/mods/proxy_ftp.conf.erb
+++ /dev/null
@@ -1,12 +0,0 @@
-
- ProxyFtpDirCharset <%= @proxy_ftp_dir_charset %>
-
- <% unless @proxy_ftp_escape_wildcards.empty? %>
- ProxyFtpEscapeWildcards <%= @proxy_ftp_escape_wildcards %>
- <% end -%>
-
- <% unless @proxy_ftp_list_on_wildcard.empty? %>
- ProxyFtpListOnWildcards <%= @proxy_ftp_list_on_wildcard %>
- <% end -%>
-
-
diff --git a/lc-gdn-chef/cookbooks/apache2/templates/mods/reqtimeout.conf.erb b/lc-gdn-chef/cookbooks/apache2/templates/mods/reqtimeout.conf.erb
deleted file mode 100644
index df9779fd580c741d28446aa933963a65f30095e4..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/templates/mods/reqtimeout.conf.erb
+++ /dev/null
@@ -1,26 +0,0 @@
-<IfModule reqtimeout_module>
- # mod_reqtimeout limits the time waiting on the client to prevent an
- # attacker from causing a denial of service by opening many connections
- # but not sending requests. This file tries to give a sensible default
- # configuration, but it may be necessary to tune the timeout values to
- # the actual situation. Note that it is also possible to configure
- # mod_reqtimeout per virtual host.
-
- # Wait max 20 seconds for the first byte of the request line+headers
- # From then, require a minimum data rate of 500 bytes/s, but don't
- # wait longer than 40 seconds in total.
- # Note: Lower timeouts may make sense on non-ssl virtual hosts but can
- # cause problem with ssl enabled virtual hosts: This timeout includes
- # the time a browser may need to fetch the CRL for the certificate. If
- # the CRL server is not reachable, it may take more than 10 seconds
- # until the browser gives up.
- # RequestReadTimeout header=20-40,minrate=500
-
- # Wait max 10 seconds for the first byte of the request body (if any)
- # From then, require a minimum data rate of 500 bytes/s
- # RequestReadTimeout body=10,minrate=500
-
- <% @request_read_timeout.each_value do |value| %>
- RequestReadTimeout <%= value %>
- <% end %>
-</IfModule>
diff --git a/lc-gdn-chef/cookbooks/apache2/templates/mods/setenvif.conf.erb b/lc-gdn-chef/cookbooks/apache2/templates/mods/setenvif.conf.erb
deleted file mode 100644
index 4f6956297cf75f8d60da31f38b6eab40f1bc33e1..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/templates/mods/setenvif.conf.erb
+++ /dev/null
@@ -1,19 +0,0 @@
-<IfModule mod_setenvif.c>
- <% unless @browser_match.nil? %>
- <% @browser_match.each do |match| %>
- BrowserMatch <%= match %>
- <% end -%>
- <% end -%>
-
- <% unless @browser_matches_no_case.nil? %>
- <% @browser_matches_no_case.each do |match| %>
- BrowserMatchNoCase <%= match %>
- <% end -%>
- <% end -%>
-
- <% unless @set_env_if_no_cases.nil? %>
- <% @set_env_if_no_cases.each do |match| %>
- SetEnvIfNoCase <%= match %>
- <% end -%>
- <% end -%>
-</IfModule>
diff --git a/lc-gdn-chef/cookbooks/apache2/templates/mods/ssl.conf.erb b/lc-gdn-chef/cookbooks/apache2/templates/mods/ssl.conf.erb
deleted file mode 100644
index 237e637d6975ab1061599ae97043d850042d0027..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/templates/mods/ssl.conf.erb
+++ /dev/null
@@ -1,101 +0,0 @@
-<IfModule mod_ssl.c>
- #
- # Pseudo Random Number Generator (PRNG):
- # Configure one or more sources to seed the PRNG of the SSL library.
- # The seed data should be of good random quality.
- # WARNING! On some platforms /dev/random blocks if not enough entropy
- # is available. This means you then cannot use the /dev/random device
- # because it would lead to very long connection times (as long as
- # it requires to make more entropy available). But usually those
- # platforms additionally provide a /dev/urandom device which doesn't
- # block. So, if available, use this one instead. Read the mod_ssl User
- # Manual for more details.
- #
- SSLRandomSeed startup builtin
- SSLRandomSeed startup file:/dev/urandom 512
- SSLRandomSeed connect builtin
- SSLRandomSeed connect file:/dev/urandom 512
-
- ##
- ## SSL Global Context
- ##
- ## All SSL configuration in this context applies both to
- ## the main server and all SSL-enabled virtual hosts.
- ##
-
- #
- # Some MIME-types for downloading Certificates and CRLs
- #
- AddType application/x-x509-ca-cert .crt
- AddType application/x-pkcs7-crl .crl
-
- # Pass Phrase Dialog:
- # Configure the pass phrase gathering process.
- # The filtering dialog program (`builtin' is an internal
- # terminal dialog) has to provide the pass phrase on stdout.
- SSLPassPhraseDialog <%= @pass_phrase_dialog %>
-
- # Inter-Process Session Cache:
- # Configure the SSL Session Cache: First the mechanism
- # to use and second the expiring timeout (in seconds).
- SSLSessionCache <%= @session_cache %>
- SSLSessionCacheTimeout <%= @session_cache_timeout %>
-
- # SSL Cipher Suite:
- # List the ciphers that the client is permitted to negotiate.
- # See the mod_ssl documentation for a complete list.
- # enable only secure ciphers:
- SSLCipherSuite <%= @cipher_suite %>
-
- # Speed-optimized SSL Cipher configuration:
- # If speed is your main concern (on busy HTTPS servers e.g.),
- # you might want to force clients to specific, performance
- # optimized ciphers. In this case, prepend those ciphers
- # to the SSLCipherSuite list, and enable SSLHonorCipherOrder.
- # Caveat: by giving precedence to RC4-SHA and AES128-SHA
- # (as in the example below), most connections will no longer
- # have perfect forward secrecy - if the server's key is
- # compromised, captures of past or future traffic must be
- # considered compromised, too.
- #SSLCipherSuite RC4-SHA:AES128-SHA:HIGH:MEDIUM:!aNULL:!MD5
- SSLHonorCipherOrder <%= @honor_cipher_order %>
-
- # The protocols to enable.
- # Available values: all, SSLv3, TLSv1, TLSv1.1, TLSv1.2
- # SSL v2 is no longer supported
- SSLProtocol <%= @protocol %>
-
- # Allow insecure renegotiation with clients which do not yet support the
- # secure renegotiation protocol. Default: Off
- SSLInsecureRenegotiation <%= @insecure_renegotiation %>
-
-<% unless @strict_sni_vhost_check == "Off"%>
- # Whether to forbid non-SNI clients to access name based virtual hosts.
- # Default: Off
- SSLStrictSNIVHostCheck <%= @strict_sni_vhost_check %>
-<% end %>
-
- # Enable compression on the SSL level
- # Enabling compression causes security issues in most setups (the so called CRIME attack).
- # Default: Off
- SSLCompression <%= @compression %>
-
- # OCSP Stapling, only in httpd 2.3.3 and later
- # This option enables OCSP stapling, as defined by the "Certificate Status Request" TLS
- # extension specified in RFC 6066. If enabled (and requested by the client), mod_ssl will
- # include an OCSP response for its own certificate in the TLS handshake.
- # Configuring an SSLStaplingCache is a prerequisite for enabling OCSP stapling.
- # Default: Off
- <% if @use_stapling == 'On' -%>
- SSLUseStapling <%= @use_stapling %>
- SSLStaplingResponderTimeout <%= @stapling_responder_timeout %>
- SSLStaplingReturnResponderErrors <%= @stapling_return_responder_errors %>
- SSLStaplingCache <%= @stapling_cache %>
- <% end -%>
-
- <% unless @directives.nil? -%>
- <% @directives.sort_by { |key, val| key }.each do |directive, value| -%>
- <%= directive %> <%= value %>
- <% end -%>
- <% end -%>
-</IfModule>
diff --git a/lc-gdn-chef/cookbooks/apache2/templates/mods/status.conf.erb b/lc-gdn-chef/cookbooks/apache2/templates/mods/status.conf.erb
deleted file mode 100644
index 07cbcedc0d1e71bb15fe03741bbccaa08de5643e..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/templates/mods/status.conf.erb
+++ /dev/null
@@ -1,25 +0,0 @@
-
- #
- # Allow server status reports generated by mod_status,
- # with the URL of http://servername/server-status
- # Uncomment and change the ".example.com" to allow
- # access from other hosts.
- #
- >
- SetHandler server-status
- Require local
- Require ip <%= @status_allow_list.join(' ') %>
-
-
- #
- # ExtendedStatus controls whether Apache will generate "full" status
- # information (ExtendedStatus On) or just basic information (ExtendedStatus
- # Off) when the "server-status" handler is called. The default is Off.
- #
- ExtendedStatus <%= @extended_status %>
-
-
- # Show Proxy LoadBalancer status in mod_status
- ProxyStatus <%= @proxy_status %>
-
-
diff --git a/lc-gdn-chef/cookbooks/apache2/templates/mods/userdir.conf.erb b/lc-gdn-chef/cookbooks/apache2/templates/mods/userdir.conf.erb
deleted file mode 100644
index d2ec11d83e409399a58ef2ff597855dddac42f57..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/templates/mods/userdir.conf.erb
+++ /dev/null
@@ -1,15 +0,0 @@
-
- UserDir public_html
- UserDir disabled root
-
- >
- AllowOverride <%= @allow_override %>
- Options <%= @options %>
-
- Require all granted
-
-
- Require all denied
-
-
-
diff --git a/lc-gdn-chef/cookbooks/apache2/templates/ports.conf.erb b/lc-gdn-chef/cookbooks/apache2/templates/ports.conf.erb
deleted file mode 100644
index 0c8a8586fc28ecaa7c0d5c38682b17ace3e66467..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/templates/ports.conf.erb
+++ /dev/null
@@ -1,6 +0,0 @@
-# This file was generated by Chef for <%= node['fqdn'] %>.
-# Do NOT modify this file by hand!
-
-<% @listen.each do |value| -%>
-Listen <%= value %>
-<% end -%>
diff --git a/lc-gdn-chef/cookbooks/apache2/templates/security.conf.erb b/lc-gdn-chef/cookbooks/apache2/templates/security.conf.erb
deleted file mode 100644
index 9dbfc65d4018e7f84d87da2465dcd58948561dc7..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/templates/security.conf.erb
+++ /dev/null
@@ -1,38 +0,0 @@
-# Changing the following options will not really affect the security of the
-# server, but might make attacks slightly more difficult in some cases.
-
-#
-# ServerTokens
-# This directive configures what you return as the Server HTTP response
-# Header. The default is 'Full' which sends information about the OS-Type
-# and compiled in modules.
-# Set to one of: Full | OS | Minimal | Minor | Major | Prod
-# where Full conveys the most information, and Prod the least.
-#
-<% unless @server_tokens.nil? %>
-ServerTokens <%= @server_tokens %>
-<% end %>
-
-#
-# Optionally add a line containing the server version and virtual host
-# name to server-generated pages (internal error documents, FTP directory
-# listings, mod_status and mod_info output etc., but not CGI generated
-# documents or custom error documents).
-# Set to "EMail" to also include a mailto: link to the ServerAdmin.
-# Set to one of: On | Off | EMail
-#
-<% unless @server_signature.nil? %>
-ServerSignature <%= @server_signature %>
-<% end %>
-
-#
-# Allow TRACE method
-#
-# Set to "extended" to also reflect the request body (only for testing and
-# diagnostic purposes).
-#
-# Set to one of: On | Off | extended
-#
-<% unless @trace_enable.nil? %>
-TraceEnable <%= @trace_enable %>
-<% end %>
diff --git a/lc-gdn-chef/cookbooks/apache2/templates/web_app.conf.erb b/lc-gdn-chef/cookbooks/apache2/templates/web_app.conf.erb
deleted file mode 100644
index 4ac4a385a07b014a6fb7ce4c5d22e7fbc5572d13..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/templates/web_app.conf.erb
+++ /dev/null
@@ -1,41 +0,0 @@
->
- ServerName <%= @params[:server_name] %>
- <% if @params[:server_aliases] -%>
- ServerAlias <%= @params[:server_aliases].join " " %>
- <% end -%>
- DocumentRoot <%= @params[:docroot] %>
-
- >
- Options <%= [@params[:directory_options] || "FollowSymLinks" ].flatten.join " " %>
- AllowOverride <%= [@params[:allow_override] || "None" ].flatten.join " " %>
- Require all granted
-
-
-
- Options FollowSymLinks
- AllowOverride None
-
-
-
- SetHandler server-status
- Require local
-
-
- RewriteEngine On
- LogLevel info rewrite:trace1
- ErrorLog <%= node['apache']['log_dir'] %>/<%= @params[:name] %>-error.log
- CustomLog <%= node['apache']['log_dir'] %>/<%= @params[:name] %>-access.log combined
-
- <% if @params[:directory_index] -%>
- DirectoryIndex <%= [@params[:directory_index]].flatten.join " " %>
- <% end -%>
-
- # Canonical host, <%= @params[:server_name] %>
- RewriteCond %{HTTP_HOST} !^<%= @params[:server_name] %> [NC]
- RewriteCond %{HTTP_HOST} !^$
- RewriteRule ^/(.*)$ http://<%= @params[:server_name] %>/$1 [L,R=301]
-
- RewriteCond %{DOCUMENT_ROOT}/system/maintenance.html -f
- RewriteCond %{SCRIPT_FILENAME} !maintenance.html
- RewriteRule ^.*$ /system/maintenance.html [L,R=503]
-
diff --git a/lc-gdn-chef/cookbooks/apache2/templates/welcome.conf.erb b/lc-gdn-chef/cookbooks/apache2/templates/welcome.conf.erb
deleted file mode 100644
index 1a083c4c1371db910316ea4e3e48c9e200875467..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/apache2/templates/welcome.conf.erb
+++ /dev/null
@@ -1,59 +0,0 @@
-#
-# This configuration file enables the default "Welcome" page if there
-# is no default index page present for the root URL. To disable the
-# Welcome page, comment out all the lines below.
-#
-# NOTE: if this file is removed, it will be restored on upgrades.
-#
-
-DocumentRoot "<%= @docroot_dir %>"
-
-# Further relax access to the default document root:
-">
- #
- # Possible values for the Options directive are "None", "All",
- # or any combination of:
- # Indexes Includes FollowSymLinks SymLinksifOwnerMatch ExecCGI MultiViews
- #
- # Note that "MultiViews" must be named *explicitly* --- "Options All"
- # doesn't give it to you.
- #
- # The Options directive is both complicated and important. Please see
- # http://httpd.apache.org/docs/2.4/mod/core.html#options
- # for more information.
- #
- Options Indexes FollowSymLinks
-
- #
- # AllowOverride controls what directives may be placed in .htaccess files.
- # It can be "All", "None", or any combination of the keywords:
- # Options FileInfo AuthConfig Limit
- #
- AllowOverride None
-
- #
- # Controls who can get stuff from this server.
- #
- Require all granted
-</Directory>
-
-<LocationMatch "^/+$">
- Options -Indexes
- ErrorDocument 403 /.noindex.html
-</LocationMatch>
-
-<Directory /usr/share/httpd/noindex>
- AllowOverride None
- Require all granted
-</Directory>
-
-<% if node['platform_version'].to_i == 8 -%>
-Alias /.noindex.html /usr/share/httpd/noindex/index.html
-Alias /poweredby.png /usr/share/httpd/icons/apache_pb2.png
-<% else -%>
-Alias /.noindex.html /usr/share/httpd/noindex/index.html
-Alias /noindex/css/bootstrap.min.css /usr/share/httpd/noindex/css/bootstrap.min.css
-Alias /noindex/css/open-sans.css /usr/share/httpd/noindex/css/open-sans.css
-Alias /images/apache_pb.gif /usr/share/httpd/noindex/images/apache_pb.gif
-Alias /images/poweredby.png /usr/share/httpd/noindex/images/poweredby.png
-<% end -%>
diff --git a/lc-gdn-chef/cookbooks/dovecot/.coveralls.yml b/lc-gdn-chef/cookbooks/dovecot/.coveralls.yml
deleted file mode 100644
index 91600595a1beacf54af2ad90ba4e67deac907a13..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/.coveralls.yml
+++ /dev/null
@@ -1 +0,0 @@
-service_name: travis-ci
diff --git a/lc-gdn-chef/cookbooks/dovecot/.gitignore b/lc-gdn-chef/cookbooks/dovecot/.gitignore
deleted file mode 100644
index 218c751ad43d1ddbbc39182ba656cca341aea738..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/.gitignore
+++ /dev/null
@@ -1,26 +0,0 @@
-.#*
-*~
-*#
-\#*#
-Berksfile.lock
-bin
-bin/*
-.bundle
-.bundle/*
-.cache
-/cookbooks
-coverage
-doc
-Dockerfile-kitchen*
-*.gem
-Gemfile.lock
-.kitchen
-.kitchen.local.yml
-metadata.json
-nodes/
-.*.sw[a-z]
-test/kitchen/.kitchen/
-*.un~
-.vagrant
-vendor
-.yardoc
diff --git a/lc-gdn-chef/cookbooks/dovecot/.kitchen.cloud.yml b/lc-gdn-chef/cookbooks/dovecot/.kitchen.cloud.yml
deleted file mode 100644
index 66da36ed6f8d920bdd9cf1a7d08c817a18d544d0..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/.kitchen.cloud.yml
+++ /dev/null
@@ -1,54 +0,0 @@
----
-driver:
- aws_ssh_key_id: <%= ENV['AWS_KEYPAIR_NAME'] %>
-
-platforms:
-- name: fedora-19-x64
- driver_plugin: digitalocean
- driver_config:
- flavor: 512MB
-
-- name: fedora-20-x64
- driver_plugin: digitalocean
- driver_config:
- flavor: 512MB
-
-- name: ubuntu-10-04-x64
- driver_plugin: digitalocean
- driver_config:
- flavor: 512MB
- run_list: recipe[apt]
-
-- name: ubuntu-12-04-x64
- driver_plugin: digitalocean
- driver_config:
- flavor: 512MB
- run_list: recipe[apt]
-
-- name: ubuntu-14-04-x64
- driver_plugin: digitalocean
- driver_config:
- flavor: 512MB
- run_list: recipe[apt]
-
-- name: amazon-2011.02.1
- driver_plugin: ec2
- driver_config:
- image_id: ami-8e1fece7
- instance_type: t1.micro
- transport:
- username: ec2-user
-<% if ENV['SSH_AGENT_PID'].nil? %>
- ssh_key: <%= ENV['EC2_SSH_KEY_PATH'] %>
-<% end %>
-
-- name: amazon-2013.09.2
- driver_plugin: ec2
- driver_config:
- image_id: ami-bba18dd2
- instance_type: t1.micro
- transport:
- username: ec2-user
-<% if ENV['SSH_AGENT_PID'].nil? %>
- ssh_key: <%= ENV['EC2_SSH_KEY_PATH'] %>
-<% end %>
diff --git a/lc-gdn-chef/cookbooks/dovecot/.kitchen.docker.yml b/lc-gdn-chef/cookbooks/dovecot/.kitchen.docker.yml
deleted file mode 100644
index 923d598592c9883fe9dd527994b0d3122fcfadce..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/.kitchen.docker.yml
+++ /dev/null
@@ -1,26 +0,0 @@
----
-driver:
- name: docker
- use_sudo: false
-
-# If you add new platforms below, include them in the .travis.yml file matrix
-platforms:
-- name: centos-6
-- name: centos-7
-- name: debian-7
- run_list: recipe[apt]
-- name: debian-8
- run_list: recipe[apt]
-- name: oraclelinux-6
- driver_config:
- platform: rhel
-- name: ubuntu-15.10
- run_list: recipe[apt]
-- name: ubuntu-16.04
- run_list: recipe[apt]
-
-# Non-official images with systemd
-- name: scientific-6.6
- driver_config:
- image: ringo/scientific:6.6
- platform: rhel
diff --git a/lc-gdn-chef/cookbooks/dovecot/.kitchen.yml b/lc-gdn-chef/cookbooks/dovecot/.kitchen.yml
deleted file mode 100644
index 1f90ee01213428194f75e8dcd8de0894ecbc0076..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/.kitchen.yml
+++ /dev/null
@@ -1,82 +0,0 @@
----
-driver:
- name: vagrant
- network:
- - ["forwarded_port", {guest: 110, host: 8110, auto_correct: true}]
- - ["forwarded_port", {guest: 995, host: 8995, auto_correct: true}]
- - ["forwarded_port", {guest: 143, host: 8143, auto_correct: true}]
- - ["forwarded_port", {guest: 993, host: 8993, auto_correct: true}]
-
-provisioner:
- name: chef_zero
- require_chef_omnibus: true
- data_bags_path: ./test/data_bags
- # client_rb:
- # treat_deprecation_warnings_as_errors: true # WiP on some depends
-
-platforms:
-- name: centos-6.7
-- name: centos-7.2
-- name: debian-7.9
- run_list: recipe[apt]
-- name: debian-8.2
- run_list: recipe[apt]
-- name: fedora-20
-- name: fedora-21
-- name: opensuse-12.3
- driver_config:
- box: opensuse-12.3-64
- box_url: 'http://downloads.sourceforge.net/project/opensusevagrant/12.3/opensuse-12.3-64.box?r=&ts=1441918998&use_mirror=freefr'
-- name: opensuse-13.2
- driver_config:
- box: opscode-opensuse-13.2
- box_url: http://opscode-vm-bento.s3.amazonaws.com/vagrant/virtualbox/opscode_opensuse-13.2-x86_64_chef-provisionerless.box
-- name: ubuntu-12.04
- run_list: recipe[apt]
-- name: ubuntu-14.04
- run_list: recipe[apt]
-- name: ubuntu-15.10
- run_list: recipe[apt]
-- name: ubuntu-16.04
- run_list: recipe[apt]
-
-suites:
-- name: default
- run_list:
- - recipe[dovecot_test]
-- name: ldap
- excludes:
- # openldap cookbook does not work
- - centos-6
- - centos-6.6
- - centos-7
- - centos-7.2
- - fedora-20
- - fedora-21
- - opensuse-12.3
- - opensuse-13.2
- - oraclelinux-6
- - scientific-6.6
- run_list:
- - recipe[dovecot_test::ldap]
-- name: attributes
- run_list:
- - recipe[dovecot_test::attributes]
-- name: create_pwfile
- run_list:
- - recipe[dovecot_test::create_pwfile]
- excludes:
- # tests are not working on older versions of dovecot
- - centos-6.7
- - debian-8.2
- - debian-8
- - ubuntu-12.04
- - ubuntu-15.10
- - ubuntu-16.04
- - fedora-20
- - fedora-21
- - opensuse-12.3
- - opensuse-13.2
- - oraclelinux-6
- - scientific-6.6
-
diff --git a/lc-gdn-chef/cookbooks/dovecot/.rubocop.yml b/lc-gdn-chef/cookbooks/dovecot/.rubocop.yml
deleted file mode 100644
index 387191bba449322cf85f748c46214dd62b02002f..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/.rubocop.yml
+++ /dev/null
@@ -1,6 +0,0 @@
-AllCops:
- Exclude:
- - .kitchen
- - vendor/**/*
-Metrics/ModuleLength:
- Max: 121
diff --git a/lc-gdn-chef/cookbooks/dovecot/.travis.yml b/lc-gdn-chef/cookbooks/dovecot/.travis.yml
deleted file mode 100644
index 636fe47b922e76cf6bc3f82830645e6758931725..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/.travis.yml
+++ /dev/null
@@ -1,47 +0,0 @@
-rvm: 2.2
-
-sudo: required
-
-services: docker
-
-env:
- matrix:
- - TESTS="style unit"
-# Split up the test-kitchen run to avoid exceeding 50 minutes:
- - TESTS="integration[default-centos-6,verify]"
- - TESTS="integration[default-debian-7,verify]"
- - TESTS="integration[default-debian-8,verify]"
- - TESTS="integration[default-oraclelinux-6,verify]"
- - TESTS="integration[default-ubuntu-1510,verify]"
- - TESTS="integration[default-ubuntu-1604,verify]"
- - TESTS="integration[default-scientific-66,verify]"
- - TESTS="integration[ldap-debian-7,verify]"
- - TESTS="integration[ldap-debian-8,verify]"
- - TESTS="integration[ldap-ubuntu-1510,verify]"
- - TESTS="integration[ldap-ubuntu-1604,verify]"
- - TESTS="integration[attributes-centos-6,verify]"
- - TESTS="integration[attributes-debian-7,verify]"
- - TESTS="integration[attributes-debian-8,verify]"
- - TESTS="integration[attributes-oraclelinux-6,verify]"
- - TESTS="integration[attributes-ubuntu-1510,verify]"
- - TESTS="integration[attributes-ubuntu-1604,verify]"
- - TESTS="integration[attributes-scientific-66,verify]"
- - TESTS="integration[create-pwfile-centos-72,verify]"
- - TESTS="integration[create-pwfile-debian-79,verify]"
- - TESTS="integration[create-pwfile-ubuntu-14.04,verify]"
-
-before_install:
-- chef --version &> /dev/null || curl -L https://www.getchef.com/chef/install.sh | sudo bash -s -- -P chefdk -v 1.2.22
-- eval "$(/opt/chefdk/bin/chef shell-init bash)"
-
-install:
-- chef exec bundle install --jobs=3 --retry=3 --without='doc integration_vagrant integration_cloud guard'
-
-before_script:
-# https://github.com/zuazo/kitchen-in-travis-native/issues/1#issuecomment-142455888
-- sudo iptables -L DOCKER || ( echo "DOCKER iptables chain missing" ; sudo iptables -N DOCKER )
-- chef --version
-- cookstyle --version
-- foodcritic --version
-
-script: travis_retry chef exec bundle exec rake $TESTS
diff --git a/lc-gdn-chef/cookbooks/dovecot/.yardopts b/lc-gdn-chef/cookbooks/dovecot/.yardopts
deleted file mode 100644
index 4019412b3dda49b3a48a331f7566f17ddf7213b5..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/.yardopts
+++ /dev/null
@@ -1,6 +0,0 @@
---markup markdown
---no-private
---exclude test
-'*/**/*.rb'
--
-*.md
diff --git a/lc-gdn-chef/cookbooks/dovecot/Berksfile b/lc-gdn-chef/cookbooks/dovecot/Berksfile
deleted file mode 100644
index 759fc8254dca86484e0d66e0c5ead220c707479f..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/Berksfile
+++ /dev/null
@@ -1,33 +0,0 @@
-# encoding: UTF-8
-# -*- mode: ruby -*-
-# vi: set ft=ruby :
-
-source 'https://supermarket.chef.io'
-my_cookbook = ::File.basename(Dir.pwd).sub(/[-_]?cookbook$/, '')
-
-# Helper to include a local cookbook from disk
-def local_cookbook(name, version = '>= 0.0.0', options = {})
- cookbook(name, version, {
- path: "../../cookbooks/#{name}"
- }.merge(options))
-end
-
-metadata
-cookbook 'apt'
-
-# Minitest Chef Handler
-# More info at https://github.com/calavera/minitest-chef-handler
-if ::File.directory?(::File.join('files', 'default', 'tests', 'minitest')) ||
- ::File.directory?(
- ::File.join(
- 'test', 'cookbooks', "#{my_cookbook}_test", 'files', 'default', 'tests',
- 'minitest'
- )
- )
- cookbook 'minitest-handler'
-end
-
-# Integration tests cookbook:
-if ::File.directory?("./test/cookbooks/#{my_cookbook}_test")
- cookbook "#{my_cookbook}_test", path: "./test/cookbooks/#{my_cookbook}_test"
-end
diff --git a/lc-gdn-chef/cookbooks/dovecot/CHANGELOG.md b/lc-gdn-chef/cookbooks/dovecot/CHANGELOG.md
deleted file mode 100644
index 4739faa3b7e4b6ca86b70cc65000c45b2edc1c27..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/CHANGELOG.md
+++ /dev/null
@@ -1,188 +0,0 @@
-# Change Log
-All notable changes to the `dovecot` cookbook will be documented in this file.
-
-The format is based on [Keep a Changelog](http://keepachangelog.com/) and this project adheres to [Semantic Versioning](http://semver.org/).
-
-## [3.2.1] - 2017-03-10
-### Fixed
-- Unit Tests fix: Replace Ubuntu `13.10` with Ubuntu `14.04`.
-
-## [3.2.0] - 2017-03-10
-Special thanks to [Vassilis Aretakis](https://github.com/billiaz) for his astonishing contributions :sparkles:
-
-### Added
-- Add pwfile support ([issue #25](https://github.com/zuazo/dovecot-cookbook/pull/25), thanks to [Vassilis Aretakis](https://github.com/billiaz), [Sjoerd Tromp](https://github.com/stromp) and [Sander van Harmelen](https://github.com/svanharmelen)).
-- metadata: Add `chef_version`.
-- README: Add rubydoc and inch-ci badges.
-
-### Changed
-- CHANGELOG: Follow "Keep a CHANGELOG".
-
-## [3.1.0] - 2017-02-27
-### Added
-- Service: Add support for Dovecot Replication ([issue #24](https://github.com/zuazo/dovecot-cookbook/pull/24), thanks [Vassilis Aretakis](https://github.com/billiaz)).
-
-### Changed
-- Change libraries namespace from `Dovecot` to `DovecotCookbook`.
-
-## [3.0.0] - 2016-10-09
-### Added
-- Add support for services: quota-status, quota-warning, doveadm ([issue #18](https://github.com/zuazo/dovecot-cookbook/pull/18), thanks [Edgaras Lukosevicius](https://github.com/ledgr)).
-- Make the dovecot user's home directory a separate attribute ([issue #21](https://github.com/zuazo/dovecot-cookbook/pull/21), thanks [Edgaras Lukosevicius](https://github.com/ledgr)).
-- Update all configuration files to Dovecot `2.2.23`.
-- Rewrite Ohai plugin to support Ohai cookbook version `4` ([issue #23](https://github.com/zuazo/dovecot-cookbook/pull/23), thanks [Edgaras Lukosevicius](https://github.com/ledgr)).
-- README: Add license badge and improve the badges position.
-
-### Changed
-- Improve TESTING documentation.
-
-### Fixed
-- Ubuntu >= `15.10` support.
-- Always create dovenull user.
-- Ohai plugin: Use `#shell_out` instead of `run_command` (fix Ohai `9` support).
-- Fix metadata error in a *calculated* field documentation.
-- Fix RuboCop offenses in ohai plugins.
-
-### Removed
-- Drop Chef `11` support (required by ohai dependency).
-- Drop Ruby `< 2.2` support.
-
-## [2.5.0] - 2016-07-28
-### Changed
-- metadata: Add ohai dependency version constraint for version `3` (fixes [#22](https://github.com/zuazo/dovecot-cookbook/issues/22), thanks [Markus Wagner](https://github.com/zuazo/dovecot-cookbook/issues/22) for reporting).
-
-## [2.4.0] - 2015-09-11
-### Added
-- SUSE and OpenSUSE support ([issue #16](https://github.com/zuazo/dovecot-cookbook/issues/16), thanks [Marcus Klein](https://github.com/kleini) for the help).
-- Oracle Linux support.
-- Scientific Linux support.
-
-### Fixed
-- Ubuntu `15.04` support.
-
-## [2.3.0] - 2015-08-30
-### Added
-- metadata: Add `source_url` and `issues_url`.
-
-### Changed
-- Update chef links to use *chef.io* domain.
-- Update contact information and links after migration.
-- README: Improve description.
-
-## [2.2.2] - 2015-07-29
-### Changed
-- README:
- - Use markdown tables.
- - Add GitHub source badge.
-
-## [2.2.1] - 2015-07-23
-### Fixed
-- Travis CI: Fix `test-kitchen` integration tests.
-
-## [2.2.0] - 2015-07-22
-### Fixed
-- Fix Debian Jessie support ([issue #15](https://github.com/zuazo/dovecot-cookbook/issues/15), thanks [Marcus Klein](https://github.com/kleini) for the help).
-
-### Changed
-- README: Improve examples and some fixes.
-
-## [2.1.0] - 2015-04-04
-### Changed
-- Update all configuration files to Dovecot `2.2.16`.
-- Update RuboCop to `0.29.1` (new offenses fixed).
-- README: Add a TOC.
-- Update Chef links to point to *chef.io*.
-
-### Fixed
-- Improve LDAP support, including integration tests ([issue #12](https://github.com/zuazo/dovecot-cookbook/issues/12), thanks [Dr. Ogg](https://github.com/neallawson) for reporting).
-
-## [2.0.0] - 2014-10-28
-### Changed
-- Delete existing configuration files if they are not required.
-- Use the `conf_files_user` attribute for configuration file directories owner instead of a hardcoded `'root'`.
-- Some libraries refactored.
-- `Dovecot::Auth`: remove code duplication.
-- Homogenize license headers.
-- README improvements.
-
-### Removed
-- Drop Ruby `< 1.9.3` support.
-- Remove deprecated `::package` recipe.
-
-### Fixed
-- Fix ohai `7` plugin *"`provides` unsupported operation"* warning.
-- Fix all RuboCop and Foodcritic offenses.
-
-## [1.0.1] - 2014-10-01
-### Added
-- README: Added Cookbook badge.
-- Added TODO file.
-
-### Fixed
-- Ubuntu `14.04` support.
-
-## [1.0.0] - 2014-04-23
-### Added
-- `from_package` recipe ([issue #8](https://github.com/zuazo/dovecot-cookbook/pull/8), thanks [Jordi Llonch](https://github.com/llonchj)).
-- Ohai 7 plugins support ([issue #10](https://github.com/zuazo/dovecot-cookbook/pull/10)).
-
-### Changed
-- Package installation and template generation logic standardized using a `Dovecot::Conf#require?` method ([issue #9](https://github.com/zuazo/dovecot-cookbook/pull/9), thanks [Jordi Llonch](https://github.com/llonchj) for the help).
- - ***Note:*** This change is ***huge***, but it is supposed to be backwards compatible.
-
-### Deprecated
-- `packages` recipe marked for future deprecation ([issue #8](https://github.com/zuazo/dovecot-cookbook/pull/8), thanks [Jordi Llonch](https://github.com/llonchj)).
-
-### Fixed
-- README: fixed some typos ([issue #7](https://github.com/zuazo/dovecot-cookbook/pull/7), thanks [Jordi Llonch](https://github.com/llonchj)).
-
-## [0.3.1] - 2014-03-15
-### Changed
-- README: Improve the description of some environment variables.
-
-### Fixed
-- Fixed Ubuntu `13.10` support.
-
-## [0.3.0] - 2014-03-13
-### Added
-- Dovecot Ohai Plugin.
-- Tested to work on Fedora 18, Fedora 19 and Amazon.
-- Add support for anvil service ([issue #5](https://github.com/zuazo/dovecot-cookbook/pull/5), thanks [Johan Svensson](https://github.com/loxley)).
-
-### Changed
-- Protect sensitive config files from read ([issue #4](https://github.com/zuazo/dovecot-cookbook/pull/4), thanks [claudex](https://github.com/claudex)).
-
-### Fixed
-- Auth-definitions below ldap are a hash, not an array ([issue #6](https://github.com/zuazo/dovecot-cookbook/pull/6), thanks [Arnold Krille](https://github.com/kampfschlaefer)).
-
-## [0.2.0] - 2013-10-28
-### Added
-- Dict auth support and mailbox_list_index ([issue #3](https://github.com/zuazo/dovecot-cookbook/pull/3), thanks [Johan Svensson](https://github.com/loxley)).
-
-## [0.1.1] - 2013-07-19
-### Fixed
-- Typo in auth-passwdfile.conf template ([issue #2](https://github.com/zuazo/dovecot-cookbook/pull/2), thanks [Trond Arve Nordheim](https://github.com/tanordheim)).
-- Fix typo in README.md ([issue #1](https://github.com/zuazo/dovecot-cookbook/pull/1), thanks [Andreas Lappe](https://github.com/alappe)).
-
-## 0.1.0 - 2013-06-08
-- Initial release of `dovecot`.
-
-[Unreleased]: https://github.com/zuazo/dovecot-cookbook/compare/3.2.1...HEAD
-[3.2.1]: https://github.com/zuazo/dovecot-cookbook/compare/3.2.0...3.2.1
-[3.2.0]: https://github.com/zuazo/dovecot-cookbook/compare/3.1.0...3.2.0
-[3.1.0]: https://github.com/zuazo/dovecot-cookbook/compare/3.0.0...3.1.0
-[3.0.0]: https://github.com/zuazo/dovecot-cookbook/compare/2.5.0...3.0.0
-[2.5.0]: https://github.com/zuazo/dovecot-cookbook/compare/2.4.0...2.5.0
-[2.4.0]: https://github.com/zuazo/dovecot-cookbook/compare/2.3.0...2.4.0
-[2.3.0]: https://github.com/zuazo/dovecot-cookbook/compare/2.2.2...2.3.0
-[2.2.2]: https://github.com/zuazo/dovecot-cookbook/compare/2.2.1...2.2.2
-[2.2.1]: https://github.com/zuazo/dovecot-cookbook/compare/2.2.0...2.2.1
-[2.2.0]: https://github.com/zuazo/dovecot-cookbook/compare/2.1.0...2.2.0
-[2.1.0]: https://github.com/zuazo/dovecot-cookbook/compare/2.0.0...2.1.0
-[2.0.0]: https://github.com/zuazo/dovecot-cookbook/compare/1.0.1...2.0.0
-[1.0.1]: https://github.com/zuazo/dovecot-cookbook/compare/1.0.0...1.0.1
-[1.0.0]: https://github.com/zuazo/dovecot-cookbook/compare/0.3.1...1.0.0
-[0.3.1]: https://github.com/zuazo/dovecot-cookbook/compare/0.3.0...0.3.1
-[0.3.0]: https://github.com/zuazo/dovecot-cookbook/compare/0.2.0...0.3.0
-[0.2.0]: https://github.com/zuazo/dovecot-cookbook/compare/0.1.1...0.2.0
-[0.1.1]: https://github.com/zuazo/dovecot-cookbook/compare/0.1.0...0.1.1
diff --git a/lc-gdn-chef/cookbooks/dovecot/CONTRIBUTING.md b/lc-gdn-chef/cookbooks/dovecot/CONTRIBUTING.md
deleted file mode 100644
index 8ecaf77b6de8629b43568bbf787505d5346d73e6..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/CONTRIBUTING.md
+++ /dev/null
@@ -1,11 +0,0 @@
-Contributing
-============
-
-1. [Fork the repository on GitHub](https://help.github.com/articles/fork-a-repo).
-2. Create a named feature branch (`$ git checkout -b my-new-feature`).
-3. Write tests for your change (if applicable).
-4. Write your change.
-5. [Run the tests](https://github.com/zuazo/dovecot-cookbook/blob/master/TESTING.md), ensuring they all pass (`$ bundle exec rake`).
-6. Commit your change (`$ git commit -am 'Add some feature'`).
-7. Push to the branch (`$ git push origin my-new-feature`).
-8. [Submit a Pull Request using GitHub](https://help.github.com/articles/creating-a-pull-request).
diff --git a/lc-gdn-chef/cookbooks/dovecot/Gemfile b/lc-gdn-chef/cookbooks/dovecot/Gemfile
deleted file mode 100644
index 19c8f52dbda7ab1acd5a8fcf43d1ccbf0d2ca6ef..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/Gemfile
+++ /dev/null
@@ -1,64 +0,0 @@
-# encoding: UTF-8
-# -*- mode: ruby -*-
-# vi: set ft=ruby :
-
-# More info at http://bundler.io/gemfile.html
-#
-# Many of the gem versions installed here are based on the versions installed
-# by ChefDK.
-
-source 'https://rubygems.org'
-
-chef_version = ENV.key?('CHEF_VERSION') ? ENV['CHEF_VERSION'] : nil
-
-group :doc do
- gem 'yard', '~> 0.9.5'
-end
-
-group :test do
- gem 'rake'
- gem 'berkshelf', '~> 5.1'
-end
-
-group :style do
- gem 'foodcritic', '~> 6.3.0'
- gem 'rubocop', '~> 0.39.0'
-end
-
-group :unit do
- gem 'chef', chef_version unless chef_version.nil?
- gem 'chefspec', '~> 6.0'
- gem 'simplecov', '~> 0.13.0'
- gem 'should_not', '~> 1.1'
-end
-
-group :integration do
- gem 'test-kitchen', '~> 1.13'
-end
-
-group :integration_docker do
- gem 'kitchen-docker', '~> 2.1'
-end
-
-group :integration_vagrant do
- gem 'vagrant-wrapper', '~> 2.0'
- gem 'kitchen-vagrant', '~> 1.0'
-end
-
-group :integration_cloud do
- gem 'kitchen-ec2', '~> 1.2'
- gem 'kitchen-digitalocean', '~> 0.9.5'
-end
-
-group :guard do
- gem 'guard', '~> 2.14'
- gem 'guard-foodcritic', '~> 2.1'
- gem 'guard-rubocop', '~> 1.1'
- gem 'guard-rspec', '~> 4.3'
- # Temporary disabled: Error is: cannot load such file -- guard/kitchen
- # gem 'guard-kitchen', '~> 0.0'
-end
-
-group :travis do
- gem 'coveralls', '~> 0.7', require: false
-end
diff --git a/lc-gdn-chef/cookbooks/dovecot/Guardfile b/lc-gdn-chef/cookbooks/dovecot/Guardfile
deleted file mode 100644
index f19a2dc4e7554d7a0e09582a205c2a3549226274..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/Guardfile
+++ /dev/null
@@ -1,89 +0,0 @@
-# encoding: UTF-8
-# -*- mode: ruby -*-
-# vi: set ft=ruby :
-
-# More info at https://github.com/guard/guard#readme
-
-# Style Tests
-# ===========
-# - Foodcritic
-# - RuboCop
-
-group :style,
- halt_on_fail: true do
- guard :foodcritic,
- cli: '--exclude test/unit',
- cookbook_paths: '.',
- all_on_start: false do
- watch(%r{attributes/.+\.rb$})
- watch(%r{definitions/.+\.rb$})
- watch(%r{libraries/.+\.rb$})
- watch(%r{providers/.+\.rb$})
- watch(%r{recipes/.+\.rb$})
- watch(%r{resources/.+\.rb$})
- watch(%r{templates/.+\.erb$})
- watch('metadata.rb')
- end
-
- guard :rubocop,
- all_on_start: false do
- watch(/.+\.rb$/)
- watch('Gemfile')
- watch('Rakefile')
- watch('Capfile')
- watch('Guardfile')
- watch('Podfile')
- watch('Thorfile')
- watch('Vagrantfile')
- watch('Berksfile')
- watch('Cheffile')
- watch('Vagabondfile')
- end
-end # group style
-
-# Unit Tests
-# ==========
-# - test/unit/libraries/${library}_spec.rb: Unit tests for libraries.
-# - test/unit/recipes/${recipe}_spec.rb: ChefSpec tests for recipes.
-# - test/unit/resources/${resource}_spec.rb: ChefSpec tests for resources.
-
-group :unit do
- guard :rspec,
- cmd: 'bundle exec rake unit',
- all_on_start: false do
- watch(%r{^libraries/(.+)\.rb$}) do |m|
- "test/unit/libraries/#{m[1]}_spec.rb"
- end
- watch(%r{^recipes/(.+)\.rb$}) { |m| "test/unit/recipes/#{m[1]}_spec.rb" }
- watch(%r{^(?:providers|resources)/(.+)\.rb$}) do |m|
- "test/unit/resources/#{m[1]}_spec.rb"
- end
- watch(%r{^test/unit/.+_spec\.rb$})
- watch('test/unit/spec_helper.rb') { 'spec' }
- end
-end # group unit
-
-# Integration Tests
-# =================
-# - test-kitchen
-#
-# Temporary disabled. See the Gemfile.
-
-# group :integration do
-# guard 'kitchen',
-# all_on_start: false do
-# watch(%r{attributes/.+\.rb$})
-# watch(%r{definitions/.+\.rb$})
-# watch(%r{libraries/.+\.rb$})
-# watch(%r{providers/.+\.rb$})
-# watch(%r{recipes/.+\.rb$})
-# watch(%r{resources/.+\.rb$})
-# watch(%r{files/.+})
-# watch(%r{templates/.+\.erb$})
-# watch('metadata.rb')
-# watch(%r{test/.+$})
-# watch('Berksfile')
-# end
-# end # group integration
-
-scope groups: [:style, :unit]
diff --git a/lc-gdn-chef/cookbooks/dovecot/LICENSE b/lc-gdn-chef/cookbooks/dovecot/LICENSE
deleted file mode 100644
index f30925a0ac474e64d59c9ce5f828575bf4e1e47e..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/LICENSE
+++ /dev/null
@@ -1,190 +0,0 @@
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
-TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
-2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
-3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
-4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
-5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
-6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
-7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
-8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
-9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
-END OF TERMS AND CONDITIONS
-
-Copyright 2013-2015 Xabier de Zuazo
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
diff --git a/lc-gdn-chef/cookbooks/dovecot/README.md b/lc-gdn-chef/cookbooks/dovecot/README.md
deleted file mode 100644
index 6beb1a612cb026815b5d8d609082ba80664f00e5..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/README.md
+++ /dev/null
@@ -1,1144 +0,0 @@
-Dovecot Cookbook
-================
-[](http://www.rubydoc.info/github/zuazo/dovecot-cookbook)
-[](https://github.com/zuazo/dovecot-cookbook)
-[](#license-and-author)
-
-[](https://supermarket.chef.io/cookbooks/dovecot)
-[](https://gemnasium.com/zuazo/dovecot-cookbook)
-[](https://codeclimate.com/github/zuazo/dovecot-cookbook)
-[](https://travis-ci.org/zuazo/dovecot-cookbook)
-[](http://inch-ci.org/github/zuazo/dovecot-cookbook)
-
-[Chef](https://www.chef.io/) cookbook to install and configure [Dovecot](http://www.dovecot.org/), open source IMAP and POP3 email server.
-
-Table of Contents
-=================
-
-* [Description](#description)
-* [Requirements](#requirements)
- * [Supported Platforms](#supported-platforms)
- * [Required Cookbooks](#required-cookbooks)
- * [Required Applications](#required-applications)
-* [Attributes](#attributes)
- * [Main Configuration Attributes](#main-configuration-attributes)
- * [Authentication Processes Attributes](#authentication-processes-attributes)
- * [Director-specific Attributes](#director-specific-attributes)
- * [Log Destination Attributes](#log-destination-attributes)
- * [Mailbox Locations and Namespaces Attributes](#mailbox-locations-and-namespaces-attributes)
- * [Master Configuration File Attributes](#master-configuration-file-attributes)
- * [SSL Attributes](#ssl-attributes)
- * [LDA Specific Attributes](#lda-specific-attributes)
- * [Replication Specific Attributes](#replication-specific-attributes)
- * [LMTP Specific Attributes](#lmtp-specific-attributes)
- * [Berkeley DB DB_CONFIG Attributes](#berkeley-db-db_config-attributes)
- * [Dictionary Quota SQL Attributes](#dictionary-quota-sql-attributes)
- * [LDAP Authentication Attributes](#ldap-authentication-attributes)
- * [SQL Authentication Attributes](#sql-authentication-attributes)
- * [Distribution Package Names Attributes](#distribution-package-names-attributes)
- * [Distribution Service Configuration](#distribution-service-configuration)
-* [Recipes](#recipes)
- * [dovecot::default](#dovecotdefault)
- * [dovecot::user](#dovecotuser)
- * [dovecot::conf_files](#dovecotconf_files)
- * [dovecot::ohai_plugin](#dovecotohai_plugin)
- * [dovecot::from_package](#dovecotfrom_package)
- * [dovecot::service](#dovecotservice)
- * [dovecot::create_pwfile](#dovecotcreate_pwfile)
-* [Ohai Plugin](#ohai-plugin)
-* [Usage Examples](#usage-examples)
- * [Including in a Cookbook Recipe](#including-in-a-cookbook-recipe)
- * [Including in the Run List](#including-in-the-run-list)
- * [Authentication Database Examples](#authentication-database-examples)
- * [Dictionary Quota SQL Example](#dictionary-quota-sql-example)
- * [Namespaces Example](#namespaces-example)
- * [Plugins Examples](#plugins-examples)
- * [Mail Log Plugin Example](#mail-log-plugin-example)
- * [Sieve Plugin Example](#sieve-plugin-example)
- * [Protocols Examples](#protocols-examples)
- * [Service Examples](#service-examples)
- * [Director Service Example](#director-service-example)
- * [Imap-login Service Example](#imap-login-service-example)
- * [Doveadm Service Example](#doveadm-service-example)
- * [Quota-status Service Example](#quota-status-service-example)
- * [Quota-warning Service Example](#quota-warning-service-example)
- * [LDAP Example](#ldap-example)
- * [Password File Example](#password-file-example)
- * [A Complete Example](#a-complete-example)
-* [Testing](#testing)
-* [Contributing](#contributing)
-* [TODO](#todo)
-* [License and Author](#license-and-author)
-
-Requirements
-============
-
-## Supported Platforms
-
-This cookbook has been tested on the following platforms:
-
-* Amazon
-* CentOS `>= 6.0`
-* Debian `>= 7.0`
-* Fedora `>= 18.0`
-* OpenSUSE
-* Oracle Linux `>= 6.0`
-* Scientific Linux `>= 6.0`
-* SUSE
-* Ubuntu `>= 12.04`
-
-Let me know if you use it successfully on any other platform.
-
-## Required Cookbooks
-
-* [ohai](https://supermarket.chef.io/cookbooks/ohai)
-
-## Required Applications
-
-* Chef `12` or higher.
-* Ruby `2.2` or higher.
-* **Dovecot `>= 2`**: requires this version of Dovecot to be available from the distribution's package manager.
-
-Attributes
-==========
-
-To see a more complete description of the attributes, go to the [Dovecot wiki2 configuration section](http://wiki2.dovecot.org/#Dovecot_configuration) or read the comments in the templates and generated configuration files.
-
-| Attribute | Default | Description |
-|:--------------------------------------------------|:---------------------------|:-------------------------------|
-| `node['dovecot']['install_from']` | `'package'` | Determines how Dovecot is installed. Only `'package'` is supported for now.
-| `node['dovecot']['user']` | `'dovecot'` | Dovecot system user. Should not be changed.
-| `node['dovecot']['group']` | `'dovecot'` | Dovecot system group. Should not be changed.
-| `node['dovecot']['user_homedir']` | *calculated* | Dovecot system user home directory.
-| `node['dovecot']['lib_path']` | *calculated* | Dovecot library path. Should not be changed.
-| `node['dovecot']['conf_path']` | `'/etc/dovecot'` | Dovecot configuration files path. Should not be changed.
-| `node['dovecot']['conf_files_user']` | `'root'` | System user owner of configuration files.
-| `node['dovecot']['conf_files_group']` | `node['dovecot']['group']` | System group owner of configuration files.
-| `node['dovecot']['conf_files_mode']` | `00644` | Configuration files system file mode bits.
-| `node['dovecot']['sensitive_files']` | `['*.conf.ext']` | An array of dovecot sensitive configuration files. Each array item can be a glob expression or a fixed file name. These file names should be relative to `node['dovecot']['conf_path']` directory. Example: `['dovecot-sql.conf.ext', '*-auth.conf.ext', 'conf.d/auth-supersecret.conf.ext']`.
-| `node['dovecot']['sensitive_files_mode']` | `00640` | Configuration files system file mode bits for sensitive files.
-| `node['dovecot']['conf_files']['core']` | *calculated* | Dovecot core configuration files list.
-| `node['dovecot']['conf_files']['imap']` | `['conf.d/20-imap.conf']` | Dovecot IMAP configuration files list.
-| `node['dovecot']['conf_files']['pop3']` | `['conf.d/20-pop3.conf']` | Dovecot POP3 configuration files list.
-| `node['dovecot']['conf_files']['lmtp']` | `['conf.d/20-lmtp.conf']` | Dovecot LMTP configuration files list.
-| `node['dovecot']['conf_files']['sieve']` | *calculated* | Dovecot Sieve configuration files list.
-| `node['dovecot']['conf_files']['ldap']` | *calculated* | Dovecot LDAP configuration files list.
-| `node['dovecot']['auth']` | `{}` | Dovecot Authentication Databases as a hash of hashes ([see the examples below](#authentication-database-examples)). Supported authdbs: checkpassword, deny, ldap, master, passwdfile, sql, system and vpopmail.
-| `node['dovecot']['namespaces']` | `[]` | Dovecot Namespaces as an array of hashes ([see the example below](#namespaces-example)).
-| `node['dovecot']['plugins']` | *calculated* | Dovecot Plugins configuration as a hash of hashes ([see the examples below](#plugins-examples)). Supported plugins: mail_log, acl and quota.
-| `node['dovecot']['protocols']` | `{}` | Dovecot Protocols configuration as a hash of hashes ([see the examples below](#protocols-examples)). Supported protocols: lda, imap, lmtp, sieve and pop3.
-| `node['dovecot']['services']` | `{}` | Dovecot Services configuration as a hash of hashes ([see the examples below](#service-examples)). Supported services: anvil, director, imap-login, pop3-login, lmtp, imap, pop3, auth, auth-worker, dict, tcpwrap, managesieve-login, managesieve, quota-status, quota-warning and doveadm.
-| `node['dovecot']['conf']['mail_plugins']` | `[]` | Dovecot default enabled mail_plugins.
-| `node['dovecot']['ohai_plugin']['build-options']` | `true` | Whether to enable reading build options inside ohai plugin. Can be disabled to be lighter.
-| `node['dovecot']['databag_name']` | `dovecot` | The data bag to use.
-| `node['dovecot']['databag_users_item']` | `users` | The data bag item to use for the users database (passwords).
-| `node['dovecot']['conf']['password_file']` | `#{node['dovecot']['conf_path']}/password` | The password file location.
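-
-The table above only documents the attribute names. As a purely illustrative sketch (the values are placeholders, not the cookbook's own examples), these top-level attributes would typically be overridden from a wrapper cookbook's attributes file:
-
-```ruby
-# Hypothetical wrapper-cookbook attributes/default.rb; only the attribute
-# names come from the table above, the values are illustrative.
-default['dovecot']['conf_files_user'] = 'root'
-default['dovecot']['conf_files_mode'] = '00644'
-default['dovecot']['sensitive_files'] = ['dovecot-sql.conf.ext', '*-auth.conf.ext']
-default['dovecot']['sensitive_files_mode'] = '00640'
-```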
-
-## Main Configuration Attributes
-
-* Configuration file: `dovecot.conf`.
-
-| Attribute | Default | Description |
-|:----------------------------------------------------|:--------|:-------------------------------|
-| `node['dovecot']['conf']['listen']` | *nil* | A comma separated list of IPs or hosts to listen on for connections.
-| `node['dovecot']['conf']['base_dir']` | *nil* | Base directory where to store runtime data.
-| `node['dovecot']['conf']['instance_name']` | *nil* | Name of this instance. Used to prefix all Dovecot processes in ps output.
-| `node['dovecot']['conf']['login_greeting']` | *nil* | Greeting message for clients.
-| `node['dovecot']['conf']['login_trusted_networks']` | *nil* | Space separated list of trusted network ranges.
-| `node['dovecot']['conf']['login_access_sockets']` | *nil* | Space separated list of login access check sockets.
-| `node['dovecot']['conf']['auth_proxy_self']` | *nil* | With proxy_maybe=yes, if the proxy destination matches any of these IPs, don't do proxying.
-| `node['dovecot']['conf']['verbose_proctitle']` | *nil* | Show more verbose process titles (in ps).
-| `node['dovecot']['conf']['shutdown_clients']` | *nil* | Should all processes be killed when Dovecot master process shuts down.
-| `node['dovecot']['conf']['doveadm_worker_count']` | *nil* | If non-zero, run mail commands via this many connections to doveadm server.
-| `node['dovecot']['conf']['doveadm_socket_path']` | *nil* | UNIX socket or host:port used for connecting to doveadm server.
-| `node['dovecot']['conf']['import_environment']` | *nil* | Space separated list of environment variables that are preserved on Dovecot startup and in its child processes.
-| `node['dovecot']['conf']['dict']` | *nil* | Dictionary server settings as a hash.
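-
-All of the `dovecot.conf` settings above live under the `['conf']` key. A minimal, illustrative sketch (the keys come from the table, the values are placeholders):
-
-```ruby
-# Illustrative only: attribute keys are documented in the table above.
-default['dovecot']['conf']['listen'] = '*, ::'
-default['dovecot']['conf']['login_greeting'] = 'Dovecot ready.'
-default['dovecot']['conf']['verbose_proctitle'] = true
-default['dovecot']['conf']['shutdown_clients'] = true
-```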
-
-## Authentication Processes Attributes
-
-* Configuration file: `conf.d/10-auth.conf`.
-
-| Attribute | Default | Description |
-|:----------------------------------------------------------|:----------|:-------------------------------|
-| `node['dovecot']['conf']['disable_plaintext_auth']` | *nil* | Disable LOGIN command and all other plaintext authentications unless SSL/TLS is used.
-| `node['dovecot']['conf']['auth_cache_size']` | *nil* | Authentication cache size (e.g. 10M). 0 means it's disabled.
-| `node['dovecot']['conf']['auth_cache_ttl']` | *nil* | Time to live for cached data.
-| `node['dovecot']['conf']['auth_cache_negative_ttl']` | *nil* | TTL for negative hits (user not found, password mismatch).
-| `node['dovecot']['conf']['auth_realms']` | *nil* | Space separated list (or array) of realms for SASL authentication mechanisms that need them.
-| `node['dovecot']['conf']['auth_default_realm']` | *nil* | Default realm/domain to use if none was specified.
-| `node['dovecot']['conf']['auth_username_chars']` | *nil* | List of allowed characters in username.
-| `node['dovecot']['conf']['auth_username_translation']` | *nil* | Username character translations before it's looked up from databases.
-| `node['dovecot']['conf']['auth_username_format']` | *nil* | Username formatting before it's looked up from databases.
-| `node['dovecot']['conf']['auth_master_user_separator']` | *nil* | If you want to allow master users to log in by specifying the master username within the normal username string, you can specify the separator character here (format: ``).
-| `node['dovecot']['conf']['auth_anonymous_username']` | *nil* | Username to use for users logging in with ANONYMOUS SASL mechanism.
-| `node['dovecot']['conf']['auth_worker_max_count']` | *nil* | Maximum number of dovecot-auth worker processes.
-| `node['dovecot']['conf']['auth_gssapi_hostname']` | *nil* | Host name to use in GSSAPI principal names.
-| `node['dovecot']['conf']['auth_krb5_keytab']` | *nil* | Kerberos keytab to use for the GSSAPI mechanism.
-| `node['dovecot']['conf']['auth_use_winbind']` | *nil* | Do NTLM and GSS-SPNEGO authentication using Samba's winbind daemon and ntlm_auth helper.
-| `node['dovecot']['conf']['auth_winbind_helper_path']` | *nil* | Path for Samba's ntlm_auth helper binary.
-| `node['dovecot']['conf']['auth_failure_delay']` | *nil* | Time to delay before replying to failed authentications.
-| `node['dovecot']['conf']['auth_ssl_require_client_cert']` | *nil* | Take the username from client's SSL certificate, using X509_NAME_get_text_by_NID() which returns the subject's DN's CommonName.
-| `node['dovecot']['conf']['auth_mechanisms']` | `'plain'` | Space separated list of wanted authentication mechanisms: plain, login, digest-md5, cram-md5, ntlm, rpa, apop, anonymous, gssapi, otp, skey, gss-spnego.
-
-## Director-specific Attributes
-
-* Configuration file: `conf.d/10-director.conf`.
-
-| Attribute | Default | Description |
-|:----------------------------------------------------|:--------|:-------------------------------|
-| `node['dovecot']['conf']['director_servers']` | *nil* | List of IPs or hostnames to all director servers, including ourself (as a string or as an array).
-| `node['dovecot']['conf']['director_mail_servers']` | *nil* | List of IPs or hostnames to all backend mail servers.
-| `node['dovecot']['conf']['director_user_expire']` | *nil* | How long to redirect users to a specific server after it no longer has any connections.
-| `node['dovecot']['conf']['director_doveadm_port']` | *nil* | TCP/IP port that accepts doveadm connections (instead of director connections).
-| `node['dovecot']['conf']['director_username_hash']` | *nil* | How the username is translated before being hashed.
-
-## Log Destination Attributes
-
-* Configuration file: `conf.d/10-logging.conf`.
-
-| Attribute | Default | Description |
-|:-------------------------------------------------------|:--------|:-------------------------------|
-| `node['dovecot']['conf']['log_path']` | *nil* | Log file to use for error messages. "syslog" logs to syslog, /dev/stderr logs to stderr.
-| `node['dovecot']['conf']['info_log_path']` | *nil* | Log file to use for informational messages. Defaults to log_path.
-| `node['dovecot']['conf']['debug_log_path']` | *nil* | Log file to use for debug messages. Defaults to info_log_path.
-| `node['dovecot']['conf']['syslog_facility']` | *nil* | Syslog facility to use if you're logging to syslog.
-| `node['dovecot']['conf']['auth_verbose']` | *nil* | Log unsuccessful authentication attempts and the reasons why they failed.
-| `node['dovecot']['conf']['auth_verbose_passwords']` | *nil* | In case of password mismatches, log the attempted password.
-| `node['dovecot']['conf']['auth_debug']` | *nil* | Even more verbose logging for debugging purposes.
-| `node['dovecot']['conf']['auth_debug_passwords']` | *nil* | In case of password mismatches, log the passwords and used scheme so the problem can be debugged.
-| `node['dovecot']['conf']['mail_debug']` | *nil* | Enable mail process debugging.
-| `node['dovecot']['conf']['verbose_ssl']` | *nil* | Show protocol level SSL errors.
-| `node['dovecot']['conf']['log_timestamp']` | *nil* | Prefix for each line written to log file.
-| `node['dovecot']['conf']['login_log_format_elements']` | *nil* | Space-separated list (or array) of elements we want to log.
-| `node['dovecot']['conf']['login_log_format']` | *nil* | Login log format.
-| `node['dovecot']['conf']['mail_log_prefix']` | *nil* | Log prefix for mail processes.
-| `node['dovecot']['conf']['deliver_log_format']` | *nil* | Format to use for logging mail deliveries.
-
-## Mailbox Locations and Namespaces Attributes
-
-* Configuration file: `conf.d/10-mail.conf`.
-
-| Attribute | Default | Description |
-|:-----------------------------------------------------------|:--------|:-------------------------------|
-| `node['dovecot']['conf']['mail_location']` | *nil* | Location for user's mailboxes.
-| `node['dovecot']['conf']['mail_shared_explicit_inbox']` | *nil* | Should shared INBOX be visible as "shared/user" or "shared/user/INBOX"?
-| `node['dovecot']['conf']['mail_uid']` | *nil* | System user used to access mails.
-| `node['dovecot']['conf']['mail_gid']` | *nil* | System group used to access mails.
-| `node['dovecot']['conf']['mail_privileged_group']` | *nil* | Group to enable temporarily for privileged operations.
-| `node['dovecot']['conf']['mail_access_groups']` | *nil* | Grant access to these supplementary groups for mail processes.
-| `node['dovecot']['conf']['mail_full_filesystem_access']` | *nil* | Allow full filesystem access to clients.
-| `node['dovecot']['conf']['mail_attribute_dict']` | *nil* | Dictionary for key=value mailbox attributes.
-| `node['dovecot']['conf']['mail_server_comment']` | *nil* | A comment or note that is associated with the server.
-| `node['dovecot']['conf']['mail_server_admin']` | *nil* | Indicates a method for contacting the server administrator. This value MUST be a URI.
-| `node['dovecot']['conf']['mmap_disable']` | *nil* | Don't use mmap() at all.
-| `node['dovecot']['conf']['dotlock_use_excl']` | *nil* | Rely on O_EXCL to work when creating dotlock files.
-| `node['dovecot']['conf']['mail_fsync']` | *nil* | When to use fsync() or fdatasync() calls: optimized, always or never.
-| `node['dovecot']['conf']['mail_nfs_storage']` | *nil* | Mail storage exists in NFS.
-| `node['dovecot']['conf']['mail_nfs_index']` | *nil* | Mail index files also exist in NFS.
-| `node['dovecot']['conf']['lock_method']` | *nil* | Locking method for index files: fcntl, flock or dotlock.
-| `node['dovecot']['conf']['mail_temp_dir']` | *nil* | Directory in which LDA/LMTP temporarily stores incoming mails >128 kB.
-| `node['dovecot']['conf']['first_valid_uid']` | *nil* | Valid UID range for users, defaults to 500 and above.
-| `node['dovecot']['conf']['last_valid_uid']` | *nil* | Valid UID range for users, defaults to 500 and above.
-| `node['dovecot']['conf']['first_valid_gid']` | *nil* | Valid GID range for users, defaults to non-root/wheel.
-| `node['dovecot']['conf']['last_valid_gid']` | *nil* | Valid GID range for users, defaults to non-root/wheel.
-| `node['dovecot']['conf']['mail_max_keyword_length']` | *nil* | Maximum allowed length for mail keyword name.
-| `node['dovecot']['conf']['valid_chroot_dirs']` | *nil* | ':' separated list of directories under which chrooting is allowed for mail processes.
-| `node['dovecot']['conf']['mail_chroot']` | *nil* | Default chroot directory for mail processes.
-| `node['dovecot']['conf']['auth_socket_path']` | *nil* | UNIX socket path to master authentication server to find users.
-| `node['dovecot']['conf']['mail_plugin_dir']` | *nil* | Directory where to look up mail plugins.
-| `node['dovecot']['conf']['mail_cache_min_mail_count']` | *nil* | The minimum number of mails in a mailbox before updates are done to cache file.
-| `node['dovecot']['conf']['mailbox_idle_check_interval']` | *nil* | When IDLE command is running, mailbox is checked once in a while to see if there are any new mails or other changes.
-| `node['dovecot']['conf']['mail_save_crlf']` | *nil* | Save mails with CR+LF instead of plain LF.
-| `node['dovecot']['conf']['mail_prefetch_count']` | *nil* | Max number of mails to keep open and prefetch to memory.
-| `node['dovecot']['conf']['mail_temp_scan_interval']` | *nil* | How often to scan for stale temporary files and delete them (0 = never).
-| `node['dovecot']['conf']['maildir_stat_dirs']` | *nil* | By default LIST command returns all entries in maildir beginning with a dot.
-| `node['dovecot']['conf']['maildir_copy_with_hardlinks']` | *nil* | When copying a message, do it with hard links whenever possible.
-| `node['dovecot']['conf']['maildir_very_dirty_syncs']` | *nil* | Assume Dovecot is the only MUA accessing Maildir.
-| `node['dovecot']['conf']['maildir_broken_filename_sizes']` | *nil* | If enabled, Dovecot doesn't use the `S=` in the Maildir filenames for getting the mail's physical size, except when recalculating Maildir++ quota.
-| `node['dovecot']['conf']['maildir_empty_new']` | *nil* | Always move mails from new/ directory to cur/, even when the \Recent flags aren't being reset.
-| `node['dovecot']['conf']['mbox_read_locks']` | *nil* | Which read locking methods to use for locking mbox: dotlock, dotlock_try, fcntl, flock or lockf.
-| `node['dovecot']['conf']['mbox_write_locks']` | *nil* | Which write locking methods to use for locking mbox: dotlock, dotlock_try, fcntl, flock or lockf.
-| `node['dovecot']['conf']['mbox_lock_timeout']` | *nil* | Maximum time to wait for lock (all of them) before aborting.
-| `node['dovecot']['conf']['mbox_dotlock_change_timeout']` | *nil* | If dotlock exists but the mailbox isn't modified in any way, override the lock file after this much time.
-| `node['dovecot']['conf']['mbox_dirty_syncs']` | *nil* | When mbox changes unexpectedly, simply read the new mails but still safely fall back to re-reading the whole mbox file whenever something in mbox isn't how it's expected to be.
-| `node['dovecot']['conf']['mbox_very_dirty_syncs']` | *nil* | Like mbox_dirty_syncs, but don't do full syncs even with SELECT, EXAMINE, EXPUNGE or CHECK commands.
-| `node['dovecot']['conf']['mbox_lazy_writes']` | *nil* | Delay writing mbox headers until doing a full write sync (EXPUNGE and CHECK commands and when closing the mailbox).
-| `node['dovecot']['conf']['mbox_min_index_size']` | *nil* | If mbox size is smaller than this (e.g. 100k), don't write index files.
-| `node['dovecot']['conf']['mbox_md5']` | *nil* | Mail header selection algorithm to use for MD5 POP3 UIDLs when pop3_uidl_format=%m.
-| `node['dovecot']['conf']['mdbox_rotate_size']` | *nil* | Maximum dbox file size until it's rotated.
-| `node['dovecot']['conf']['mdbox_rotate_interval']` | *nil* | Maximum dbox file age until it's rotated.
-| `node['dovecot']['conf']['mdbox_preallocate_space']` | *nil* | When creating new mdbox files, immediately preallocate their size to mdbox_rotate_size.
-| `node['dovecot']['conf']['mail_attachment_dir']` | *nil* | Directory root where to store mail attachments. Disabled, if empty.
-| `node['dovecot']['conf']['mail_attachment_min_size']` | *nil* | Attachments smaller than this aren't saved externally.
-| `node['dovecot']['conf']['mail_attachment_fs']` | *nil* | Filesystem backend to use for saving attachments: posix, sis posix or sis-queue posix.
-| `node['dovecot']['conf']['mail_attachment_hash']` | *nil* | Hash format to use in attachment filenames.
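-
-For example, a Maildir layout could be expressed through these attributes roughly as follows (a sketch: `maildir:~/Maildir` is standard Dovecot location syntax, the remaining values are placeholders):
-
-```ruby
-# Sketch only: attribute keys are documented in the table above.
-default['dovecot']['conf']['mail_location'] = 'maildir:~/Maildir'
-default['dovecot']['conf']['mail_privileged_group'] = 'mail'
-default['dovecot']['conf']['first_valid_uid'] = 500
-```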
-
-## Master Configuration File Attributes
-
-* Configuration file: `conf.d/10-master.conf`.
-
-| Attribute | Default | Description |
-|:---------------------------------------------------|:--------|:-------------------------------|
-| `node['dovecot']['conf']['default_process_limit']` | *nil* | Default process limit.
-| `node['dovecot']['conf']['default_client_limit']` | *nil* | Default client limit.
-| `node['dovecot']['conf']['default_vsz_limit']` | *nil* | Default VSZ (virtual memory size) limit for service processes.
-| `node['dovecot']['conf']['default_login_user']` | *nil* | Login user is internally used by login processes.
-| `node['dovecot']['conf']['default_internal_user']` | *nil* | Internal user is used by unprivileged processes.
-
-## SSL Attributes
-
-* Configuration file: `conf.d/10-ssl.conf`.
-
-| Attribute | Default | Description |
-|:-------------------------------------------------------|:-------------|:-------------------------------|
-| `node['dovecot']['conf']['ssl']` | *calculated* | SSL/TLS support: `true` or `false`.
-| `node['dovecot']['conf']['ssl_cert']` | *calculated* | PEM encoded X.509 SSL/TLS certificate.
-| `node['dovecot']['conf']['ssl_key']` | *calculated* | PEM encoded X.509 SSL/TLS private key.
-| `node['dovecot']['conf']['ssl_key_password']` | *nil* | If key file is password protected, give the password here.
-| `node['dovecot']['conf']['ssl_ca']` | *nil* | PEM encoded trusted certificate authority.
-| `node['dovecot']['conf']['ssl_require_crl']` | *nil* | Require that CRL check succeeds for client certificates.
-| `node['dovecot']['conf']['ssl_client_ca_dir']` | *nil* | Directory for trusted SSL CA certificates. These are used only when Dovecot needs to act as an SSL client.
-| `node['dovecot']['conf']['ssl_client_ca_file']` | *nil* | File for trusted SSL CA certificates. These are used only when Dovecot needs to act as an SSL client.
-| `node['dovecot']['conf']['ssl_verify_client_cert']` | *nil* | Request client to send a certificate.
-| `node['dovecot']['conf']['ssl_cert_username_field']` | *nil* | Which field from certificate to use for username.
-| `node['dovecot']['conf']['ssl_parameters_regenerate']` | *nil* | How often to regenerate the SSL parameters file.
-| `node['dovecot']['conf']['ssl_dh_parameters_length']` | *nil* | DH parameters length to use.
-| `node['dovecot']['conf']['ssl_protocols']` | *nil* | SSL protocols to use.
-| `node['dovecot']['conf']['ssl_cipher_list']` | *nil* | SSL ciphers to use.
-| `node['dovecot']['conf']['ssl_prefer_server_ciphers']` | *nil* | Prefer the server's order of ciphers over client's.
-| `node['dovecot']['conf']['ssl_crypto_device']` | *nil* | SSL crypto device to use, for valid values run `$ openssl engine`.
-| `node['dovecot']['conf']['ssl_options']` | *nil* | SSL extra options. Currently supported options are: `'no_compression'`.
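-
-A typical TLS setup only touches a handful of these keys. The sketch below is illustrative; the certificate paths are placeholders, and the leading `<` in `ssl_cert`/`ssl_key` is standard Dovecot syntax for reading the value from a file:
-
-```ruby
-# Illustrative values; attribute keys are from the table above.
-default['dovecot']['conf']['ssl'] = true
-default['dovecot']['conf']['ssl_cert'] = '</etc/ssl/certs/dovecot.pem'
-default['dovecot']['conf']['ssl_key'] = '</etc/ssl/private/dovecot.pem'
-default['dovecot']['conf']['ssl_protocols'] = '!SSLv2 !SSLv3'
-```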
-
-## LDA Specific Attributes
-
-Also used by LMTP.
-
-* Configuration files: `conf.d/15-lda.conf`.
-
-| Attribute | Default | Description |
-|:-----------------------------------------------------------|:--------|:-------------------------------|
-| `node['dovecot']['conf']['postmaster_address']` | *nil* | Address to use when sending rejection mails.
-| `node['dovecot']['conf']['hostname']` | *nil* | Hostname to use in various parts of sent mails, e.g. in Message-Id.
-| `node['dovecot']['conf']['quota_full_tempfail']` | *nil* | If user is over quota, return with temporary failure instead of bouncing the mail.
-| `node['dovecot']['conf']['sendmail_path']` | *nil* | Binary to use for sending mails.
-| `node['dovecot']['conf']['submission_host']` | *nil* | If non-empty, send mails via this SMTP host[:port] instead of sendmail.
-| `node['dovecot']['conf']['rejection_subject']` | *nil* | Subject: header to use for rejection mails.
-| `node['dovecot']['conf']['rejection_reason']` | *nil* | Human readable error message for rejection mails.
-| `node['dovecot']['conf']['recipient_delimiter']` | *nil* | Delimiter character between local-part and detail in email address.
-| `node['dovecot']['conf']['lda_original_recipient_header']` | *nil* | Header where the original recipient address (SMTP's RCPT TO: address) is taken from if not available elsewhere.
-| `node['dovecot']['conf']['lda_mailbox_autocreate']` | *nil* | Should saving a mail to a nonexistent mailbox automatically create it?
-| `node['dovecot']['conf']['lda_mailbox_autosubscribe']` | *nil* | Should automatically created mailboxes be also automatically subscribed?
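-
-For local delivery, usually only a couple of these are needed; a hedged sketch with placeholder values:
-
-```ruby
-# Placeholder values; attribute keys are documented in the table above.
-default['dovecot']['conf']['postmaster_address'] = 'postmaster@example.com'
-default['dovecot']['conf']['hostname'] = 'mail.example.com'
-default['dovecot']['conf']['lda_mailbox_autocreate'] = true
-```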
-
-## Replication Specific Attributes
-
-Used for Dovecot replication/sync.
-
-* Configuration files: `conf.d/15-replication.conf`.
-
-| Attribute | Default | Description |
-|:-----------------------------------------------------------|:--------|:-------------------------------|
-| `node['dovecot']['conf']['doveadm_port']` | *nil* | Used to set a default port for the doveadm replication commands.
-| `node['dovecot']['conf']['doveadm_password']` | *nil* | Needed to set a 'secret' for the replication communication between two servers.
-
-
-## LMTP Specific Attributes
-
-* Configuration file: `conf.d/20-lmtp.conf`
-
-| Attribute | Default | Description |
-|:---------------------------------------------------------|:--------|:-------------------------------|
-| `node['dovecot']['conf']['lmtp_proxy']` | *nil* | Support proxying to other LMTP/SMTP servers by performing passdb lookups.
-| `node['dovecot']['conf']['lmtp_save_to_detail_mailbox']` | *nil* | When recipient address includes the detail (e.g. user+detail), try to save the mail to the detail mailbox.
-| `node['dovecot']['conf']['lmtp_rcpt_check_quota']` | *nil* | Verify quota before replying to RCPT TO. This adds a small overhead.
-| `node['dovecot']['conf']['lmtp_hdr_delivery_address']` | *nil* | Which recipient address to use for Delivered-To: header and Received: header.
-
-## Berkeley DB DB_CONFIG Attributes
-
-* Configuration file: `dovecot-db.conf.ext`.
-
-| Attribute | Default | Description |
-|:--------------------------------|:--------|:-------------------------------|
-| `node['dovecot']['conf']['db']` | *nil* | DB_CONFIG for Berkeley DB as a hash.
-
-## Dictionary Quota SQL Attributes
-
-* Configuration files: `dovecot-dict-sql.conf.ext`.
-
-| Attribute | Default | Description |
-|:-------------------------------------------------|:--------|:-------------------------------|
-| `node['dovecot']['conf']['dict_sql']['connect']` | *nil* | Dict sql connect configuration as a string or an array.
-| `node['dovecot']['conf']['dict_sql']['maps']` | *nil* | Dict sql database tables maps ([see the example below](#dictionary-quota-sql-example)).
-
-## LDAP Authentication Attributes
-
-* Configuration files: `dovecot-ldap.conf.ext`.
-
-| Attribute | Default | Description |
-|:---------------------------------------------------------|:--------|:-------------------------------|
-| `node['dovecot']['conf']['ldap']['hosts']` | *nil* | Space separated list or array of LDAP hosts to use.
-| `node['dovecot']['conf']['ldap']['uris']` | *nil* | LDAP URIs to use.
-| `node['dovecot']['conf']['ldap']['dn']` | *nil* | Distinguished Name, the username used to login to the LDAP server.
-| `node['dovecot']['conf']['ldap']['dnpass']` | *nil* | Password for LDAP server, if dn is specified.
-| `node['dovecot']['conf']['ldap']['sasl_bind']` | *nil* | Use SASL binding instead of the simple binding.
-| `node['dovecot']['conf']['ldap']['sasl_mech']` | *nil* | SASL mechanism name to use.
-| `node['dovecot']['conf']['ldap']['sasl_realm']` | *nil* | SASL realm to use.
-| `node['dovecot']['conf']['ldap']['sasl_authz_id']` | *nil* | SASL authorization ID, ie. the dnpass is for this "master user", but the dn is still the logged in user.
-| `node['dovecot']['conf']['ldap']['tls']` | *nil* | Use TLS to connect to the LDAP server.
-| `node['dovecot']['conf']['ldap']['tls_ca_cert_file']` | *nil* | TLS options, currently supported only with OpenLDAP.
-| `node['dovecot']['conf']['ldap']['tls_ca_cert_dir']` | *nil* | TLS options, currently supported only with OpenLDAP.
-| `node['dovecot']['conf']['ldap']['tls_cipher_suite']` | *nil* | TLS options, currently supported only with OpenLDAP.
-| `node['dovecot']['conf']['ldap']['tls_cert_file']` | *nil* | TLS cert/key is used only if LDAP server requires a client certificate.
-| `node['dovecot']['conf']['ldap']['tls_key_file']` | *nil* | TLS cert/key is used only if LDAP server requires a client certificate.
-| `node['dovecot']['conf']['ldap']['tls_require_cert']` | *nil* | Valid values: never, hard, demand, allow, try.
-| `node['dovecot']['conf']['ldap']['ldaprc_path']` | *nil* | Use the given ldaprc path.
-| `node['dovecot']['conf']['ldap']['debug_level']` | *nil* | LDAP library debug level as specified by LDAP_DEBUG_* in ldap_log.h.
-| `node['dovecot']['conf']['ldap']['auth_bind']` | *nil* | Use authentication binding for verifying password's validity.
-| `node['dovecot']['conf']['ldap']['auth_bind_userdn']` | *nil* | If authentication binding is used, you can save one LDAP request per login if user's DN can be specified with a common template.
-| `node['dovecot']['conf']['ldap']['ldap_version']` | *nil* | LDAP protocol version to use. Likely 2 or 3.
-| `node['dovecot']['conf']['ldap']['base']` | *nil* | LDAP base. %variables can be used here.
-| `node['dovecot']['conf']['ldap']['deref']` | *nil* | Dereference: never, searching, finding or always.
-| `node['dovecot']['conf']['ldap']['scope']` | *nil* | Search scope: base, onelevel or subtree.
-| `node['dovecot']['conf']['ldap']['user_attrs']` | *nil* | User attributes are given in LDAP-name=dovecot-internal-name list.
-| `node['dovecot']['conf']['ldap']['user_filter']` | *nil* | Filter for user lookup.
-| `node['dovecot']['conf']['ldap']['pass_attrs']` | *nil* | Password checking attributes.
-| `node['dovecot']['conf']['ldap']['pass_filter']` | *nil* | Filter for password lookups.
-| `node['dovecot']['conf']['ldap']['iterate_attrs']` | *nil* | Attributes to get a list of all users.
-| `node['dovecot']['conf']['ldap']['iterate_filter']` | *nil* | Filter to get a list of all users.
-| `node['dovecot']['conf']['ldap']['default_pass_scheme']` | *nil* | Default password scheme. "{scheme}" before password overrides this.
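-
-As a purely illustrative sketch of how these keys could be combined for a simple LDAP lookup (hosts, DNs and filters are placeholders; see the LDAP Example section further below for the cookbook's own usage):
-
-```ruby
-# Placeholder values; only the attribute keys come from the table above.
-default['dovecot']['conf']['ldap']['uris'] = 'ldap://ldap.example.com'
-default['dovecot']['conf']['ldap']['dn'] = 'cn=dovecot,dc=example,dc=com'
-default['dovecot']['conf']['ldap']['dnpass'] = 'secret'
-default['dovecot']['conf']['ldap']['base'] = 'ou=people,dc=example,dc=com'
-default['dovecot']['conf']['ldap']['auth_bind'] = true
-default['dovecot']['conf']['ldap']['user_filter'] = '(&(objectClass=posixAccount)(uid=%u))'
-```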
-
-## SQL Authentication Attributes
-
-* Configuration file: `dovecot-sql.conf.ext`.
-
-| Attribute | Default | Description |
-|:--------------------------------------------------------|:--------|:-------------------------------|
-| `node['dovecot']['conf']['sql']['driver']` | *nil* | Database driver: mysql, pgsql or sqlite.
-| `node['dovecot']['conf']['sql']['connect']` | *nil* | Database connection string or array. This is driver-specific setting.
-| `node['dovecot']['conf']['sql']['default_pass_scheme']` | *nil* | Default password scheme.
-| `node['dovecot']['conf']['sql']['password_query']` | *nil* | passdb query to retrieve the password.
-| `node['dovecot']['conf']['sql']['user_query']` | *nil* | userdb query to retrieve the user information.
-| `node['dovecot']['conf']['sql']['iterate_query']` | *nil* | Query to get a list of all usernames.
-
-
-## Distribution Package Names Attributes
-
-The attributes below contain the default distribution packages required by the supported platforms, but you are free to define your own to support other platforms. Keep in mind that they are all grouped under a subkey (`type`). Each `node['dovecot']['packages'][type]` attribute is then used together with the corresponding `node['dovecot']['conf_files'][type]` attribute to install the packages and generate the configuration files; see the sketch after the table below.
-
-| Attribute | Default | Description |
-|:----------------------------------------|:-------------|:-------------------------------|
-| `node['dovecot']['packages']['core']` | *calculated* | Dovecot core package names array.
-| `node['dovecot']['packages']['imap']` | *calculated* | Dovecot IMAP package names array.
-| `node['dovecot']['packages']['pop3']` | *calculated* | Dovecot POP3 package names array.
-| `node['dovecot']['packages']['lmtp']` | *calculated* | Dovecot LMTP package names array.
-| `node['dovecot']['packages']['sieve']` | *calculated* | Dovecot Sieve package names array.
-| `node['dovecot']['packages']['ldap']` | *calculated* | Dovecot LDAP package names array.
-| `node['dovecot']['packages']['sqlite']` | *calculated* | Dovecot SQLite package names array.
-| `node['dovecot']['packages']['mysql']` | *calculated* | Dovecot MySQL package names array.
-| `node['dovecot']['packages']['pgsql']` | *calculated* | Dovecot PostgreSQL package names array.
-
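-For example, to add support for a platform with different package names, you can override the arrays directly. A minimal sketch; the package names below are illustrative assumptions, not the cookbook's calculated defaults:
-
-```ruby
-# Hypothetical package names for an otherwise unsupported platform:
-node.default['dovecot']['packages']['core'] = %w(dovecot)
-node.default['dovecot']['packages']['imap'] = %w(dovecot-imap)
-node.default['dovecot']['packages']['sieve'] = %w(dovecot-sieve)
-```
-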
-## Distribution Service Configuration
-
-These attributes configure the Dovecot service for each distribution. You will want to change them to support new platforms or to improve the support for platforms that are already covered; see the example after the table below.
-
-| Attribute | Default | Description |
-|:-----------------------------------------|:-------------|:----------------------------------|
-| `node['dovecot']['service']['name']` | `'dovecot'` | Dovecot system service name.
-| `node['dovecot']['service']['supports']` | *calculated* | Dovecot service supported actions.
-| `node['dovecot']['service']['provider']` | *calculated* | Dovecot service Chef provider class.
-
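-A minimal sketch for a hypothetical platform follows. The exact shape of the `supports` value depends on how the service recipe passes it to the Chef `service` resource, so treat these values as assumptions:
-
-```ruby
-# Illustrative overrides for a hypothetical platform:
-node.default['dovecot']['service']['name'] = 'dovecot'
-node.default['dovecot']['service']['supports'] = {
-  'restart' => true,
-  'reload' => true,
-  'status' => true
-}
-```
-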
-Recipes
-=======
-
-## dovecot::default
-
-Installs and configures Dovecot.
-
-## dovecot::user
-
-Creates the dovecot system user. Used by the default recipe.
-
-## dovecot::conf_files
-
-Generates all the configuration files. Used by the default recipe.
-
-## dovecot::ohai_plugin
-
-Provides an Ohai plugin for reading dovecot install information.
-
-## dovecot::from_package
-
-Installs the required packages. Used by the default recipe if `node['dovecot']['install_from']` is `package`.
-
-## dovecot::service
-
-Configures the Dovecot service. Used by the default recipe.
-
-## dovecot::create_pwfile
-
-Creates and configures a password file from local mailboxes based on a data bag.
-
-* `node['dovecot']['databag_name']`: The data bag in which the items are stored.
-* `node['dovecot']['databag_users_item']`: The data bag item to use (inside the data bag set above).
-
-Ohai Plugin
-===========
-
-The `ohai_plugin` recipe installs an Ohai plugin, which is activated automatically.
-
-It will set the following attributes:
-
-* `node['dovecot']['version']`: version of Dovecot.
-* `node['dovecot']['build-options']`: some Dovecot build options.
- * `node['dovecot']['build-options']['mail-storages']`
- * `node['dovecot']['build-options']['sql-driver-plugins']` or `node['dovecot']['build-options']['sql-drivers']`
- * `node['dovecot']['build-options']['passdb']`
- * `node['dovecot']['build-options']['userdb']`
-
-This is an output example:
-
-```json
-"dovecot": {
- "version": "2.0.19",
- "build-options": {
- "ioloop": "epoll",
- "notify": "inotify",
- "ipv6": true,
- "openssl": true,
- "io_block_size": "8192",
- "mail-storages": [
- "shared",
- "mdbox",
- "sdbox",
- "maildir",
- "mbox",
- "cydir",
- "raw"
- ],
- "sql-driver-plugins": [
- "mysql",
- "postgresql",
- "sqlite"
- ],
- "passdb": [
- "checkpassword",
- "ldap",
- "pam",
- "passwd",
- "passwd-file",
- "shadow",
- "sql"
- ],
- "userdb": [
- "checkpassword",
- "ldap(plugin)",
- "nss",
- "passwd",
- "prefetch",
- "passwd-file",
- "sql"
- ]
- }
-}
-```
-
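-A recipe can then read these attributes, for example to branch on the detected version. A minimal sketch; the attributes are `nil` until the plugin has run at least once:
-
-```ruby
-dovecot_version = node['dovecot']['version']
-if dovecot_version.nil?
-  Chef::Log.info('Dovecot version not detected yet (the Ohai plugin has not run)')
-elsif Gem::Version.new(dovecot_version) >= Gem::Version.new('2.1')
-  # Dovecot >= 2.1 supports the mailboxes syntax used in the namespaces example below.
-  Chef::Log.info("Dovecot #{dovecot_version} supports namespace mailboxes")
-end
-```
-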
-Usage Examples
-==============
-
-## Including in a Cookbook Recipe
-
-You can simply include it in a recipe:
-
-```ruby
-# from a recipe
-include_recipe 'dovecot'
-```
-
-Don't forget to include the `dovecot` cookbook as a dependency in the metadata.
-
-```ruby
-# metadata.rb
-# [...]
-
-depends 'dovecot'
-```
-
-## Including in the Run List
-
-Alternatively, you can include the default recipe in your Run List:
-
-```json
-{
- "name": "mail.example.com",
- "[...]": "[...]",
- "run_list": [
- "recipe[dovecot]"
- ]
-}
-```
-
-## Authentication Database Examples
-
-Authentication database attributes, inside the `passdb` or `userdb` hash values, can contain either arrays or hashes.
-
-Supported auths are the following: `checkpassword`, `deny`, `ldap`, `master`, `passwdfile`, `sql`, `system` and `vpopmail`.
-
-```ruby
-node.default['dovecot']['auth']['checkpassword'] =
- {
- 'passdb' => { # hash
- 'driver' => 'checkpassword',
- 'args' => '/usr/bin/checkpassword'
- },
- 'userdb' => {
- 'driver' => 'prefetch'
- }
- }
-```
-
-```ruby
-node.default['dovecot']['auth']['system']['passdb'] =
- [ # array
- {
- # without driver
- 'args' => 'dovecot'
- },
- {
- 'driver' => 'passwd',
- 'args' => ''
- },
- {
- 'driver' => 'shadow',
- 'args' => ''
- },
- {
- 'driver' => 'bsdauth',
- 'args' => ''
- }
- ]
-```
-
-## Dictionary Quota SQL Example
-
-```ruby
-node.default['dovecot']['conf']['dict_sql']['maps'] =
- [
- {
- 'pattern' => 'priv/quota/storage',
- 'table' => 'quota',
- 'username_field' => 'username',
- 'value_field' => 'bytes'
- },
- {
- 'pattern' => 'priv/quota/messages',
- 'table' => 'quota',
- 'username_field' => 'username',
- 'value_field' => 'messages'
- },
- {
- 'pattern' => 'shared/expire/$user/$mailbox',
- 'table' => 'expires',
- 'value_field' => 'expire_stamp',
- 'fields' => {
- 'username' => '$user',
- 'mailbox' => '$mailbox'
- }
- }
- ]
-```
-
-## Namespaces Example
-
-The `['namespaces']` attribute is an array whose elements can be either arrays or hashes.
-
-```ruby
-node.default['dovecot']['namespaces'] = [
- {
- 'separator' => '/',
- 'prefix' => '"#mbox/"',
- 'location' => 'mbox:~/mail:INBOX=/var/mail/%u',
- 'inbox' => true,
- 'hidden' => true,
- 'list' => false
- }, {
- 'separator' => '/',
- 'prefix' => '',
- 'location' => 'maildir:~/Maildir'
- }, { # this requires Dovecot >= 2.1
- 'name' => 'inbox',
- 'separator' => '/',
- 'prefix' => '',
- 'inbox' => true,
- 'mailboxes' => {
- 'Drafts' => {
- 'special_use' => '\Drafts'
- },
- 'Junk' => {
- 'special_use' => '\Junk'
- },
- 'Trash' => {
- 'special_use' => '\Trash'
- },
- 'Sent' => {
- 'special_use' => '\Sent'
- },
- 'Sent Messages' => {
- 'special_use' => '\Sent'
- },
- 'virtual/All' => {
- 'special_use' => '\All'
- },
- 'virtual/Flagged' => {
- 'special_use' => '\All'
- }
- }
- }
-]
-```
-
-## Plugins Examples
-
-Plugin attribute values should be of type hash.
-
-Supported plugins include the following: `acl`, `mail_log`, `quota` and `sieve`.
-
-### Mail Log Plugin Example
-
-```ruby
-node.default['dovecot']['plugins']['mail_log'] = {
- 'mail_log_events' =>
- 'delete undelete expunge copy mailbox_delete mailbox_rename',
- 'mail_log_fields' => 'uid box msgid size'
-}
-```
-
-### Sieve Plugin Example
-
-```ruby
-node.default['dovecot']['plugins']['sieve'] = {
- 'sieve' => '~/.dovecot.sieve',
- 'sieve_dir' => '~/sieve'
-}
-```
-
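-### Quota Plugin Example
-
-The `quota` plugin follows the same pattern. A minimal sketch, using standard Dovecot quota settings rather than values taken from this cookbook's defaults:
-
-```ruby
-node.default['dovecot']['plugins']['quota'] = {
-  'quota' => 'maildir:User quota',
-  'quota_rule' => '*:storage=1G',
-  'quota_rule2' => 'Trash:storage=+100M'
-}
-```
-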
-## Protocols Examples
-
-Protocol attribute values should be of type hash.
-
-Supported protocols are the following: `lda`, `imap`, `lmtp`, `sieve` and `pop3`.
-
-```ruby
-node.default['dovecot']['protocols']['lda'] = {
- 'mail_plugins' => %w($mail_plugins)
-}
-```
-
-To enable the IMAP protocol without additional settings:
-
-```ruby
-node.default['dovecot']['protocols']['imap'] = {}
-```
-
-## Service Examples
-
-The `['services']` attribute is a hash, and each service value should also be a hash, but the `['listeners']` subkey can contain either a hash or an array.
-
-Inside this `listeners` key, you should name each listener with the format *PROTOCOL:NAME*. Allowed protocols are `fifo`, `unix` and `inet`.
-
-Supported services are the following: `anvil`, `director`, `imap-login`, `pop3-login`, `lmtp`, `imap`, `pop3`, `auth`, `auth-worker`, `dict`, `tcpwrap`, `managesieve-login`, `managesieve`, `aggregator`, `replicator`, `config`.
-
-### Director Service Example
-
-```ruby
-node.default['dovecot']['services']['director']['listeners'] = [
- { 'unix:login/director' => { 'mode' => '0666' } },
- { 'fifo:login/proxy-notify' => { 'mode' => '0666' } },
- { 'unix:director-userdb' => { 'mode' => '0666' } },
- { 'inet' => { 'port' => '5432' } }
-]
-```
-
-### Imap-login Service Example
-
-```ruby
-node.default['dovecot']['services']['imap-login'] = {
- 'listeners' => [
- { 'inet:imap' => { 'port' => 143 } },
- { 'inet:imaps' => { 'port' => 993, 'ssl' => true } }
- ],
- 'service_count' => 1,
- 'process_min_avail' => 0,
- 'vsz_limit' => '64M'
-}
-```
-
-### Doveadm Service Example
-
-```ruby
-default['dovecot']['services']['doveadm'] = {
- 'listeners' => [
- { 'inet:doveadm-server' => { 'port' => 3333 }
- }
- ]
-}
-```
-
-### Quota-status Service Example
-
-```ruby
-default['dovecot']['services']['quota-status'] = {
- 'executable' => 'quota-status -p postfix',
- 'listeners' => [
- { 'inet:imap' => { 'port' => 4444 } }
- ]
-}
-```
-
-### Quota-warning Service Example
-
-```ruby
-default['dovecot']['services']['quota-warning'] = {
- 'user' => 'dovecot',
- 'executable' => 'script /usr/local/bin/quota-warning.sh',
- 'listeners' => [
- { 'unix:quota-warning' => { 'user' => 'postfix' } }
- ]
-}
-```
-
-## LDAP Example
-
-This is a recipe example to integrate Dovecot with [OpenLDAP](http://www.openldap.org/). The following cookbooks are used:
-
-* [`openldap`](https://supermarket.chef.io/cookbooks/openldap)
-* [`ldap`](https://supermarket.chef.io/cookbooks/ldap)
-
-```ruby
-# Function to generate the passwords in LDAP format
-def generate_ldap_password(password, salt = '12345')
- require 'digest'
- require 'base64'
- digest = Digest::SHA1.digest(password + salt)
- '{SSHA}' + Base64.encode64(digest + salt).chomp
-end
-recipe = self
-
-# Create LDAP credentials
-ldap_password = 'secretsauce'
-ldap_credentials = {
- 'bind_dn' => "cn=#{node['openldap']['cn']},#{node['openldap']['basedn']}",
- 'password' => ldap_password
-}
-
-# Configure OpenLDAP server
-node.default['openldap']['tls_enabled'] = false
-node.default['openldap']['rootpw'] = generate_ldap_password(ldap_password)
-node.default['openldap']['loglevel'] = 'any'
-
-include_recipe 'openldap::server'
-
-# Create some LDAP entries as an example
-
-include_recipe 'ldap'
-
-ldap_entry node['openldap']['basedn'] do
- attributes objectClass: %w(top dcObject organization),
- o: 'myorg',
- dc: 'myorg',
- description: 'My organization'
- credentials ldap_credentials
-end
-
-ldap_entry "ou=accounts,#{node['openldap']['basedn']}" do
- attributes objectClass: %w(top organizationalUnit),
- ou: 'accounts',
- description: 'Dovecot email accounts'
- credentials ldap_credentials
-end
-
-ldap_entry "cn=dovecot,ou=accounts,#{node['openldap']['basedn']}" do
- attributes objectClass: %w(top person),
- cn: 'dovecot',
- sn: 'dovecot'
- credentials ldap_credentials
-end
-
-# Create an email account
-
-email_account = {
- cn: 'Ole Wobble Olson',
- sn: 'Olson',
- uid: 'wobble',
- uidNumber: '1002', # must be a string for ldap_entry
- gidNumber: '100',
- homeDirectory: '/home/wobble',
- userPassword: recipe.generate_ldap_password('w0bbl3_p4ss')
-}
-
-ldap_entry "uid=wobble,ou=accounts,#{node['openldap']['basedn']}" do
- attributes email_account.merge(objectClass: %w(top person posixAccount))
- credentials ldap_credentials
-end
-
-# Create home directory for the email account
-directory email_account[:homeDirectory] do
- owner email_account[:uidNumber].to_i # should be an integer for directory
- group email_account[:gidNumber].to_i
-end
-
-# Dovecot IMAP configuration
-node.default['dovecot']['conf']['mail_location'] = 'maildir:~/Maildir'
-node.default['dovecot']['protocols']['imap'] = {}
-node.default['dovecot']['services']['imap-login'] =
- {
- 'listeners' =>
- [
- { 'inet:imap' => { 'port' => 143 } },
- { 'inet:imaps' => { 'port' => 993, 'ssl' => true } }
- ],
- 'service_count' => 1,
- 'process_min_avail' => 0,
- 'vsz_limit' => '64M'
- }
-
-# Dovecot LDAP configuration
-node.default['dovecot']['conf']['ldap']['auth_bind'] = true
-node.default['dovecot']['conf']['ldap']['hosts'] = %w(localhost)
-node.default['dovecot']['conf']['ldap']['dn'] = ldap_credentials['bind_dn']
-node.default['dovecot']['conf']['ldap']['dnpass'] = ldap_credentials['password']
-node.default['dovecot']['conf']['ldap']['base'] = node['openldap']['basedn']
-
-include_recipe 'dovecot'
-```
-
-## Password File Example
-
-This is an example of how to use a password file for the passdb and userdb.
-
-```ruby
-# Define databag and item inside Databag (default.conf):
-node.default['dovecot']['databag_name'] = 'dovecot'
-node.default['dovecot']['databag_users_item'] = 'users'
-
-# Attributes required for the passwd-file passdb/userdb to work:
-node.default['dovecot']['auth']['passwdfile'] =
- {
- 'passdb' => {
- 'driver' => 'passwd-file',
- 'args' => node['dovecot']['conf']['password_file']
- },
- 'userdb' => {
- 'driver' => 'passwd-file',
- 'args' =>
- "username_format=%u #{node['dovecot']['conf']['password_file']}",
- 'default_fields' => 'home=/var/dovecot/vmail/%d/%n'
- }
- }
-
-# Then include the recipe that creates the password file:
-include_recipe 'dovecot::create_pwfile'
-```
-
-Data bag example, showing two ways of defining a user:
-
-```json
-{
- "users": {
- "dilan": "password1234",
- "vassilis": [
- "vassilis1234", null, null, null, null, null, null
- ]
- }
-}
-```
-
-## A Complete Example
-
-This is a complete recipe example for installing and configuring Dovecot 2 to work with PostfixAdmin MySQL tables, including the IMAP service:
-
-```ruby
-
-node.default['dovecot']['conf_files_group'] = 'vmail'
-
-node.default['dovecot']['conf']['disable_plaintext_auth'] = false
-node.default['dovecot']['conf_files_mode'] = '00640'
-
-# 10-logging.conf
-node.default['dovecot']['conf']['log_path'] = 'syslog'
-node.default['dovecot']['conf']['syslog_facility'] = 'mail'
-node.default['dovecot']['conf']['log_timestamp'] = '"%Y-%m-%d %H:%M:%S"'
-
-# 10-mail.conf
-node.default['dovecot']['conf']['mail_location'] = 'maildir:~/Maildir'
-node.default['dovecot']['conf']['mail_privileged_group'] = 'mail'
-
-# 10-master.conf
-node.default['dovecot']['services']['auth']['listeners'] = [
- # auth_socket_path points to this userdb socket by default. It's typically
- # used by dovecot-lda, doveadm, possibly imap process, etc. Its default
- # permissions make it readable only by root, but you may need to relax these
- # permissions. Users that have access to this socket are able to get a list
- # of all usernames and get results of everyone's userdb lookups.
- { 'unix:auth-userdb' => {
- 'mode' => '0600',
- 'user' => 'vmail',
- 'group' => 'vmail'
- } },
- # Postfix smtp-auth
- { 'unix:/var/spool/postfix/private/auth' => {
- 'mode' => '0666',
- 'user' => 'postfix',
- 'group' => 'postfix'
- } }
-]
-
-# 15-lda.conf
-# TODO: Change this to fit your server:
-node.default['dovecot']['conf']['postmaster_address'] =
- 'postmaster@mycompany.org'
-# TODO: Change this to fit your server:
-node.default['dovecot']['conf']['hostname'] = 'mail.mycompany.org'
-node.default['dovecot']['conf']['lda_mailbox_autocreate'] = true
-node.default['dovecot']['conf']['lda_mailbox_autosubscribe'] = true
-# We want sieve enabled
-node.default['dovecot']['protocols']['lda']['mail_plugins'] =
- %w($mail_plugins sieve)
-
-# 20-imap.conf
-# We want IMAP enabled with the default configuration
-node.default['dovecot']['protocols']['imap'] = {}
-
-# 90-sieve.conf
-node.default['dovecot']['plugins']['sieve']['sieve'] = '~/.dovecot.sieve'
-node.default['dovecot']['plugins']['sieve']['sieve_dir'] = '~/sieve'
-node.default['dovecot']['plugins']['sieve']['sieve_global_path'] =
- "#{node['dovecot']['conf_path']}/sieve/default.sieve"
-
-# auth-sql.conf.ext
-node.default['dovecot']['auth']['sql']['passdb']['args'] =
- '/etc/dovecot/dovecot-sql.conf.ext'
-node.default['dovecot']['auth']['sql']['userdb']['args'] =
- '/etc/dovecot/dovecot-sql.conf.ext'
-
-# auth-static.conf.ext
-node.default['dovecot']['auth']['static']['userdb']['args'] = %w(
- uid=vmail
- gid=vmail
- home=/var/vmail/%d/%n
- allow_all_users=yes
-)
-
-# auth-system.conf.ext
-node.default['dovecot']['auth']['system'] = {}
-
-# dovecot-sql.conf.ext
-# We want to enable MySQL driver
-node.default['dovecot']['conf']['sql']['driver'] = 'mysql'
-# TODO: Change the database password below, please:
-node.default['dovecot']['conf']['sql']['connect'] = %w(
- host=localhost
- dbname=postfix
- user=postfix
- password=postfix_pass
-)
-# md5crypt encryption method
-node.default['dovecot']['conf']['sql']['default_pass_scheme'] = 'MD5-CRYPT'
-node.default['dovecot']['conf']['sql']['password_query'] = [
- 'SELECT username AS user, password',
- 'FROM mailbox',
- "WHERE username = '%u' AND active = '1'"
-]
-node.default['dovecot']['conf']['sql']['user_query'] = [
- 'SELECT',
- ' username AS user,',
- ' password,',
- ' 5000 as uid,',
- ' 5000 as gid,',
- " concat('/var/vmail/', maildir) AS home,",
- " concat('maildir:/var/vmail/', maildir) AS mail",
- 'FROM mailbox',
- "WHERE username = '%u' AND active = '1'"
-]
-
-node.default['dovecot']['conf']['sql']['iterate_query'] = [
- 'SELECT username AS user',
- "FROM mailbox WHERE active = '1'"
-]
-
-include_recipe 'dovecot'
-
-# Compile sieve scripts
-
-# This must run after Dovecot is installed, because the sievec binary is required.
-sieve_global_path = "#{node['dovecot']['conf_path']}/sieve/default.sieve"
-execute 'sievec sieve_global_path' do
- command "sievec '#{sieve_global_path}'"
- action :nothing
-end
-directory ::File.dirname(sieve_global_path) do
- owner 'root'
- group 'root'
- mode '00755'
- recursive true
- not_if { ::File.exist?(::File.dirname(sieve_global_path)) }
-end
-# This will be the default sieve script:
-template node['dovecot']['plugins']['sieve']['sieve_global_path'] do
- source 'default.sieve.erb'
- owner 'root'
- group 'root'
- mode '00644'
- notifies :run, 'execute[sievec sieve_global_path]'
-end
-```
-
-If you want a more complete example, you can look at the [postfix-dovecot](https://github.com/zuazo/postfix-dovecot-cookbook) cookbook.
-
-Testing
-=======
-
-See [TESTING.md](https://github.com/zuazo/dovecot-cookbook/blob/master/TESTING.md).
-
-Contributing
-============
-
-Please do not hesitate to [open an issue](https://github.com/zuazo/dovecot-cookbook/issues/new) with any questions or problems.
-
-See [CONTRIBUTING.md](https://github.com/zuazo/dovecot-cookbook/blob/master/CONTRIBUTING.md).
-
-TODO
-====
-
-See [TODO.md](https://github.com/zuazo/dovecot-cookbook/blob/master/TODO.md).
-
-License and Author
-==================
-
-| | |
-|:---------------------|:-----------------------------------------|
-| **Author:** | [Xabier de Zuazo](https://github.com/zuazo) ()
-| **Contributor:** | [Andreas Lappe](https://github.com/alappe)
-| **Contributor:** | [Trond Arve Nordheim](https://github.com/tanordheim)
-| **Contributor:** | [Johan Svensson](https://github.com/loxley)
-| **Contributor:** | [Arnold Krille](https://github.com/kampfschlaefer)
-| **Contributor:** | [claudex](https://github.com/claudex)
-| **Contributor:** | [Jordi Llonch](https://github.com/llonchj)
-| **Contributor:** | [Michael Burns](https://github.com/mburns)
-| **Contributor:** | [Marcus Klein](https://github.com/kleini)
-| **Contributor:** | [Vassilis Aretakis](https://github.com/billiaz)
-| **Contributor:** | [Edgaras Lukosevicius](https://github.com/ledgr)
-| **Contributor:** | [Sjoerd Tromp](https://github.com/stromp)
-| **Contributor:** | [Sander van Harmelen](https://github.com/svanharmelen)
-| **Copyright:** | Copyright (c) 2015-2016, Xabier de Zuazo
-| **Copyright:** | Copyright (c) 2013-2015, Onddo Labs, SL.
-| **License:** | Apache License, Version 2.0
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
diff --git a/lc-gdn-chef/cookbooks/dovecot/Rakefile b/lc-gdn-chef/cookbooks/dovecot/Rakefile
deleted file mode 100644
index 762128ea260ad0eff5240112c2deeb1d0d4b8861..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/Rakefile
+++ /dev/null
@@ -1,150 +0,0 @@
-# encoding: UTF-8
-# -*- mode: ruby -*-
-# vi: set ft=ruby :
-
-#
-# Available Rake tasks:
-#
-# $ rake -T
-# rake clean # Clean some generated files
-# rake default # Run doc, style, unit and integration tests
-# rake doc # Generate Ruby documentation
-# rake integration[regexp,action] # Run Test Kitchen integration tests
-# rake integration:cloud[regexp,action] # Run Kitchen tests in the cloud
-# rake integration:docker[regexp,action] # Run Kitchen tests using docker
-# rake integration:vagrant[regexp,action] # Run Kitchen tests using vagrant
-# rake style # Run all style checks
-# rake style:chef # Run Chef style checks using foodcritic
-# rake style:ruby # Run Ruby style checks using rubocop
-# rake style:ruby:auto_correct # Auto-correct RuboCop offenses
-# rake unit # Run ChefSpec unit tests
-# rake yard # Generate Ruby documentation using yard
-#
-# More info at https://github.com/ruby/rake/blob/master/doc/rakefile.rdoc
-#
-
-require 'bundler/setup'
-
-# Checks if we are inside a Continuous Integration machine.
-#
-# @return [Boolean] whether we are inside a CI.
-# @example
-# ci? #=> false
-def ci?
- ENV['CI'] == 'true'
-end
-
-desc 'Clean some generated files'
-task :clean do
- %w(
- Berksfile.lock
- .bundle
- .cache
- coverage
- Gemfile.lock
- .kitchen
- metadata.json
- vendor
- ).each { |f| FileUtils.rm_rf(Dir.glob(f)) }
-end
-
-desc 'Generate Ruby documentation using yard'
-task :yard do
- require 'yard'
- YARD::Rake::YardocTask.new do |t|
- t.stats_options = %w(--list-undoc)
- end
-end
-
-desc 'Generate Ruby documentation'
-task doc: %w(yard)
-
-namespace :style do
- require 'rubocop/rake_task'
- desc 'Run Ruby style checks using rubocop'
- RuboCop::RakeTask.new(:ruby)
-
- require 'foodcritic'
- desc 'Run Chef style checks using foodcritic'
- FoodCritic::Rake::LintTask.new(:chef) do |t|
- t.options = {
- fail_tags: ['any'],
- progress: true
- }
- end
-end
-
-desc 'Run all style checks'
-task style: %w(style:chef style:ruby)
-
-desc 'Run ChefSpec unit tests'
-task :unit do
- require 'rspec/core/rake_task'
- RSpec::Core::RakeTask.new(:unit) do |t|
- t.rspec_opts = '--color --format progress'
- t.pattern = 'test/unit/**{,/*/**}/*_spec.rb'
- end
-end
-
-desc 'Run Test Kitchen integration tests'
-namespace :integration do
- # Generates the `Kitchen::Config` class configuration values.
- #
- # @param loader_config [Hash] loader configuration options.
- # @return [Hash] configuration values for the `Kitchen::Config` class.
- def kitchen_config(loader_config = {})
- {}.tap do |config|
- unless loader_config.empty?
- @loader = Kitchen::Loader::YAML.new(loader_config)
- config[:loader] = @loader
- end
- end
- end
-
- # Gets a collection of instances.
- #
- # @param regexp [String] regular expression to match against instance names.
- # @param config [Hash] configuration values for the `Kitchen::Config` class.
- # @return [Collection] all instances.
- def kitchen_instances(regexp, config)
- instances = Kitchen::Config.new(config).instances
- return instances if regexp.nil? || regexp == 'all'
- instances.get_all(Regexp.new(regexp))
- end
-
- # Runs a test kitchen action against some instances.
- #
- # @param action [String] kitchen action to run (defaults to `'test'`).
- # @param regexp [String] regular expression to match against instance names.
- # @param loader_config [Hash] loader configuration options.
- # @return void
- def run_kitchen(action, regexp, loader_config = {})
- action = 'test' if action.nil?
- require 'kitchen'
- Kitchen.logger = Kitchen.default_file_logger
- config = kitchen_config(loader_config)
- kitchen_instances(regexp, config).each { |i| i.send(action) }
- end
-
- desc 'Run Test Kitchen integration tests using vagrant'
- task :vagrant, [:regexp, :action] do |_t, args|
- run_kitchen(args.action, args.regexp)
- end
-
- desc 'Run Test Kitchen integration tests using docker'
- task :docker, [:regexp, :action] do |_t, args|
- run_kitchen(args.action, args.regexp, local_config: '.kitchen.docker.yml')
- end
-
- desc 'Run Test Kitchen integration tests in the cloud'
- task :cloud, [:regexp, :action] do |_t, args|
- run_kitchen(args.action, args.regexp, local_config: '.kitchen.cloud.yml')
- end
-end
-
-desc 'Run Test Kitchen integration tests'
-task :integration, [:regexp, :action] =>
- ci? ? %w(integration:docker) : %w(integration:vagrant)
-
-desc 'Run doc, style, unit and integration tests'
-task default: %w(doc style unit integration)
diff --git a/lc-gdn-chef/cookbooks/dovecot/TESTING.md b/lc-gdn-chef/cookbooks/dovecot/TESTING.md
deleted file mode 100644
index ed4de16ff3e4e7420b3950c243cc81544d58a880..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/TESTING.md
+++ /dev/null
@@ -1,152 +0,0 @@
-Testing
-=======
-
-## Installing the Requirements
-
-You must have [VirtualBox](https://www.virtualbox.org/manual/ch02.html) and [Vagrant](https://www.vagrantup.com/docs/installation/), or [Docker](https://docs.docker.com/engine/installation/), installed.
-
-It's also recommended to install [ChefDK](https://downloads.chef.io/chef-dk/):
-
- $ curl -L https://www.getchef.com/chef/install.sh | sudo bash -s -- -P chefdk
- $ eval "$(/opt/chefdk/bin/chef shell-init bash)"
-
-You can install gem dependencies with bundler:
-
- $ chef exec bundle install --without travis
-
-Or, if you don't want to use ChefDK:
-
- $ gem install bundler
- $ bundle install --without travis
-
-## Generating the Documentation
-
-This will generate the documentation for the source files inside the [*libraries/*](https://github.com/zuazo/dovecot-cookbook/tree/master/libraries) directory.
-
- $ chef exec bundle exec rake doc
-
-The documentation is included in the source code itself.
-
-## Syntax Style Tests
-
-We use the following tools to test the code style:
-
-* [RuboCop](https://github.com/bbatsov/rubocop#readme)
-* [foodcritic](http://www.foodcritic.io/)
-
-To run the tests:
-
- $ chef exec bundle exec rake style
-
-Or:
-
- $ rubocop .
- $ foodcritic .
-
-## Unit Tests
-
-We use [ChefSpec](https://github.com/sethvargo/chefspec#readme) and [RSpec](http://rspec.info/) for the unit tests. RSpec is generally used to test the libraries or some Ruby specific code.
-
-The unit test files are placed in the [*test/unit/*](https://github.com/zuazo/dovecot-cookbook/tree/master/test/unit) directory.
-
-To run the tests:
-
- $ chef exec bundle exec rake unit
-
-Or:
-
- $ rspec test/unit
-
-## Integration Tests in Vagrant
-
-We use [Test Kitchen](http://kitchen.ci/) to run the integration tests, which are written using [Serverspec](http://serverspec.org/).
-
-The integration test files are placed in the [*test/integration/*](https://github.com/zuazo/dovecot-cookbook/tree/master/test/integration) directory. Some cookbooks required by these tests are in the [*test/cookbooks/*](https://github.com/zuazo/dovecot-cookbook/tree/master/test/cookbooks) directory.
-
-To run the tests:
-
- $ chef exec bundle exec rake integration:vagrant
-
-Or:
-
- $ kitchen list
- $ kitchen test
- [...]
-
-### Integration Tests in Docker
-
-You can run the integration tests using [Docker](https://www.docker.com/) instead of Vagrant if you prefer.
-
-Of course, you need to have [Docker installed](https://docs.docker.com/engine/installation/).
-
- $ wget -qO- https://get.docker.com/ | sh
-
-Then use the `integration:docker` rake task to run the tests:
-
- $ bundle exec rake integration:docker
-
-Or:
-
- $ export KITCHEN_LOCAL_YAML=.kitchen.docker.yml
- $ kitchen list
- $ kitchen test
-
-### Integration Tests in the Cloud
-
-You can run the tests in the cloud instead of using Vagrant. First, you must set the following environment variables:
-
-* `AWS_ACCESS_KEY_ID`
-* `AWS_SECRET_ACCESS_KEY`
-* `AWS_KEYPAIR_NAME`: EC2 SSH public key name. This is the name used in the Amazon EC2 Console's Key Pairs section.
-* `EC2_SSH_KEY_PATH`: EC2 SSH private key local full path. Only when you are not using an SSH Agent.
-* `DIGITALOCEAN_ACCESS_TOKEN`
-* `DIGITALOCEAN_SSH_KEY_IDS`: DigitalOcean SSH numeric key IDs.
-* `DIGITALOCEAN_SSH_KEY_PATH`: DigitalOcean SSH private key local full path. Only when you are not using an SSH Agent.
-
-Then use the `integration:cloud` rake task to run the tests:
-
- $ bundle exec rake integration:cloud
-
-## Guard
-
-Guard is a tool that runs the tests automatically while you are making changes to the source files.
-
-To run Guard:
-
- $ guard
-
-More info at [Guard Readme](https://github.com/guard/guard#readme).
-
-## Available Rake Tasks
-
-There are multiple Rake tasks that you can use to run the tests:
-
- $ rake -T
-
-See [Rakefile documentation](https://github.com/ruby/rake/blob/master/doc/rakefile.rdoc) for more information.
-
-## Using Vagrant with the Vagrantfile
-
-### Vagrantfile Requirements
-
-* [ChefDK](https://downloads.chef.io/chef-dk/)
-* Berkshelf and Omnibus Vagrant plugins:
-```
-$ vagrant plugin install vagrant-berkshelf vagrant-omnibus
-```
-* The environment correctly configured for ChefDK:
-```
-$ eval "$(/opt/chefdk/bin/chef shell-init bash)"
-```
-
-### Vagrantfile Usage
-
- $ vagrant up
-
-To run Chef again on the same machine:
-
- $ vagrant provision
-
-To destroy the machine:
-
- $ vagrant destroy
diff --git a/lc-gdn-chef/cookbooks/dovecot/TODO.md b/lc-gdn-chef/cookbooks/dovecot/TODO.md
deleted file mode 100644
index 87b0a47b5cf7958ae5cab4cd2100a7ef30285143..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/TODO.md
+++ /dev/null
@@ -1,7 +0,0 @@
-TODO
-====
-
-* [ ] Complete the **dovecot-dict-auth.conf.ext.erb** template.
-* [ ] Add unit tests for libraries.
-* [ ] Install from sources.
-* [ ] Integrate with `ssl_certificate` cookbook?
diff --git a/lc-gdn-chef/cookbooks/dovecot/Vagrantfile b/lc-gdn-chef/cookbooks/dovecot/Vagrantfile
deleted file mode 100644
index 86c49946f1ce5739809795a2545fa84dfdfcc6b2..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/Vagrantfile
+++ /dev/null
@@ -1,124 +0,0 @@
-# encoding: UTF-8
-# -*- mode: ruby -*-
-# vi: set ft=ruby :
-
-# See TESTING.md file.
-
-Vagrant.configure('2') do |config|
- # All Vagrant configuration is done here. The most common configuration
- # options are documented and commented below. For a complete reference, please
- # see the online documentation at vagrantup.com.
-
- config.vm.hostname = 'dovecot.local'
-
- # Opscode Chef Vagrant box to use.
- # More boxes here: https://github.com/opscode/bento
- opscode_box = 'opscode-ubuntu-12.04'
-
- # Every Vagrant virtual environment requires a box to build off of.
- config.vm.box = opscode_box
-
- # The url from where the 'config.vm.box' box will be fetched if it doesn't
- # already exist on the user's system.
- config.vm.box_url =
- 'http://opscode-vm-bento.s3.amazonaws.com/vagrant/virtualbox/'\
- "#{opscode_box.sub('-', '_')}_chef-provisionerless.box"
-
- # Assign this VM to a host-only network IP, allowing you to access it via the
- # IP. Host-only networks can talk to the host machine as well as any other
- # machines on the same network, but cannot be accessed (through this network
- # interface) by any external networks.
- config.vm.network :private_network, ip: '10.73.57.103'
-
- # Create a public network, which is generally matched to a bridged network. Bridged
- # networks make the machine appear as another physical device on your network.
-
- # config.vm.network :public_network
-
- # Create a forwarded port mapping which allows access to a specific port
- # within the machine from a port on the host machine. In the example below,
- # accessing 'localhost:8080' will access port 80 on the guest machine.
- # config.vm.network :forwarded_port, guest: 80, host: 8080, auto_correct: true
- # pop3, pop3s, imap, imaps
- config.vm.network :forwarded_port, guest: 110, host: 8110, auto_correct: true
- config.vm.network :forwarded_port, guest: 995, host: 8995, auto_correct: true
- config.vm.network :forwarded_port, guest: 143, host: 8143, auto_correct: true
- config.vm.network :forwarded_port, guest: 993, host: 8993, auto_correct: true
-
- # The time in seconds that Vagrant will wait for the machine to boot and be
- # accessible.
- config.vm.boot_timeout = 120
-
- # Share an additional folder to the guest VM. The first argument is the path
- # on the host to the actual folder. The second argument is the path on the
- # guest to mount the folder. And the optional third argument is a set of
- # non-required options.
- # config.vm.synced_folder '../data', '/vagrant_data'
-
- # Provider-specific configuration so you can fine-tune various backing
- # providers for Vagrant. These expose provider-specific options.
- # Example for VirtualBox:
- #
- # config.vm.provider :virtualbox do |vb|
- # # Don't boot with headless mode
- # vb.gui = true
- #
- # # Use VBoxManage to customize the VM. For example to change memory:
- # vb.memory = 1024
- # end
- #
- # View the documentation for the provider you're using for more information on
- # available options.
-
- # Install the latest version of Chef.
- config.omnibus.chef_version = :latest
-
- # Enabling the Berkshelf plugin. To enable this globally, add this
- # configuration option to your ~/.vagrant.d/Vagrantfile file.
- config.berkshelf.enabled = true
-
- # The path to the Berksfile to use with Vagrant Berkshelf.
- # config.berkshelf.berksfile_path = './Berksfile'
-
- # An array of symbols representing groups of cookbook described in the
- # Vagrantfile to exclusively install and copy to Vagrant's shelf.
- # config.berkshelf.only = []
-
- # An array of symbols representing groups of cookbook described in the
- # Vagrantfile to skip installing and copying to Vagrant's shelf.
- # config.berkshelf.except = []
-
- config.vm.provision :chef_solo do |chef|
- # Set your node attributes here:
- chef.json = {}
-
- chef.run_list = %w(
- recipe[apt]
- recipe[dovecot::default]
- )
- end
-
- # Enable provisioning with chef server, specifying the chef server URL, and
- # the path to the validation key (relative to this Vagrantfile).
- #
- # The Opscode Platform uses HTTPS. Substitute your organization for ORGNAME in
- # the URL and validation key.
- #
- # If you have your own Chef Server, use the appropriate URL, which may be HTTP
- # instead of HTTPS depending on your configuration. Also change the validation
- # key to validation.pem.
- #
- # orgname = 'ORGNAME'
- # config.vm.provision :chef_client do |chef|
- # chef.chef_server_url = "https://api.chef.io/organizations/#{orgname}"
- # chef.validation_key_path = "#{orgname}-validator.pem"
- # end
- #
- # If you're using the Opscode platform, your validator client is
- # ORGNAME-validator, replacing ORGNAME with your organization name.
- #
- # If you have your own Chef Server, the default validation client name is
- # chef-validator, unless you changed the configuration.
- #
- # chef.validation_client_name = "#{orgname}-validator"
-end
diff --git a/lc-gdn-chef/cookbooks/dovecot/attributes/auth.rb b/lc-gdn-chef/cookbooks/dovecot/attributes/auth.rb
deleted file mode 100644
index 40f8c623199b29a1a6466b77dd00bec3e621d689..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/attributes/auth.rb
+++ /dev/null
@@ -1,22 +0,0 @@
-# encoding: UTF-8
-#
-# Cookbook Name:: dovecot
-# Attributes:: auth
-# Author:: Xabier de Zuazo ()
-# Copyright:: Copyright (c) 2013-2014 Onddo Labs, SL.
-# License:: Apache License, Version 2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-default['dovecot']['auth'] = {}
diff --git a/lc-gdn-chef/cookbooks/dovecot/attributes/conf_10_auth.rb b/lc-gdn-chef/cookbooks/dovecot/attributes/conf_10_auth.rb
deleted file mode 100644
index 86a4fdef70cdb4a661a81b328a4f47da4d03bea6..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/attributes/conf_10_auth.rb
+++ /dev/null
@@ -1,42 +0,0 @@
-# encoding: UTF-8
-#
-# Cookbook Name:: dovecot
-# Attributes:: conf_10_auth
-# Author:: Xabier de Zuazo ()
-# Copyright:: Copyright (c) 2014 Onddo Labs, SL.
-# License:: Apache License, Version 2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# conf.d/10-auth.conf
-
-default['dovecot']['conf']['disable_plaintext_auth'] = nil
-default['dovecot']['conf']['auth_cache_size'] = nil
-default['dovecot']['conf']['auth_cache_ttl'] = nil
-default['dovecot']['conf']['auth_cache_negative_ttl'] = nil
-default['dovecot']['conf']['auth_realms'] = nil
-default['dovecot']['conf']['auth_default_realm'] = nil
-default['dovecot']['conf']['auth_username_chars'] = nil
-default['dovecot']['conf']['auth_username_translation'] = nil
-default['dovecot']['conf']['auth_username_format'] = nil
-default['dovecot']['conf']['auth_master_user_separator'] = nil
-default['dovecot']['conf']['auth_anonymous_username'] = nil
-default['dovecot']['conf']['auth_worker_max_count'] = nil
-default['dovecot']['conf']['auth_gssapi_hostname'] = nil
-default['dovecot']['conf']['auth_krb5_keytab'] = nil
-default['dovecot']['conf']['auth_use_winbind'] = nil
-default['dovecot']['conf']['auth_winbind_helper_path'] = nil
-default['dovecot']['conf']['auth_failure_delay'] = nil
-default['dovecot']['conf']['auth_ssl_require_client_cert'] = nil
-default['dovecot']['conf']['auth_mechanisms'] = 'plain'
diff --git a/lc-gdn-chef/cookbooks/dovecot/attributes/conf_10_director.rb b/lc-gdn-chef/cookbooks/dovecot/attributes/conf_10_director.rb
deleted file mode 100644
index ecae018cd2b0ceb61611de9d5045b22fa3a6dc66..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/attributes/conf_10_director.rb
+++ /dev/null
@@ -1,28 +0,0 @@
-# encoding: UTF-8
-#
-# Cookbook Name:: dovecot
-# Attributes:: conf_10_director
-# Author:: Xabier de Zuazo ()
-# Copyright:: Copyright (c) 2014-2015 Onddo Labs, SL.
-# License:: Apache License, Version 2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# conf.d/10-director.conf
-
-default['dovecot']['conf']['director_servers'] = nil
-default['dovecot']['conf']['director_mail_servers'] = nil
-default['dovecot']['conf']['director_user_expire'] = nil
-default['dovecot']['conf']['director_doveadm_port'] = nil
-default['dovecot']['conf']['director_username_hash'] = nil
diff --git a/lc-gdn-chef/cookbooks/dovecot/attributes/conf_10_logging.rb b/lc-gdn-chef/cookbooks/dovecot/attributes/conf_10_logging.rb
deleted file mode 100644
index d5d5e15813b23d4618ae13fca40468a76bcf72a3..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/attributes/conf_10_logging.rb
+++ /dev/null
@@ -1,38 +0,0 @@
-# encoding: UTF-8
-#
-# Cookbook Name:: dovecot
-# Attributes:: conf_10_logging
-# Author:: Xabier de Zuazo ()
-# Copyright:: Copyright (c) 2014 Onddo Labs, SL.
-# License:: Apache License, Version 2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# conf.d/10-logging.conf
-
-default['dovecot']['conf']['log_path'] = nil
-default['dovecot']['conf']['info_log_path'] = nil
-default['dovecot']['conf']['debug_log_path'] = nil
-default['dovecot']['conf']['syslog_facility'] = nil
-default['dovecot']['conf']['auth_verbose'] = nil
-default['dovecot']['conf']['auth_verbose_passwords'] = nil
-default['dovecot']['conf']['auth_debug'] = nil
-default['dovecot']['conf']['auth_debug_passwords'] = nil
-default['dovecot']['conf']['mail_debug'] = nil
-default['dovecot']['conf']['verbose_ssl'] = nil
-default['dovecot']['conf']['log_timestamp'] = nil
-default['dovecot']['conf']['login_log_format_elements'] = nil
-default['dovecot']['conf']['login_log_format'] = nil
-default['dovecot']['conf']['mail_log_prefix'] = nil
-default['dovecot']['conf']['deliver_log_format'] = nil
diff --git a/lc-gdn-chef/cookbooks/dovecot/attributes/conf_10_mail.rb b/lc-gdn-chef/cookbooks/dovecot/attributes/conf_10_mail.rb
deleted file mode 100644
index 1498a44d7d3497467d2cb3c27f3855083806029d..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/attributes/conf_10_mail.rb
+++ /dev/null
@@ -1,76 +0,0 @@
-# encoding: UTF-8
-#
-# Cookbook Name:: dovecot
-# Attributes:: conf_10_mail
-# Author:: Xabier de Zuazo ()
-# Copyright:: Copyright (c) 2016 Xabier de Zuazo
-# Copyright:: Copyright (c) 2014-2015 Onddo Labs, SL.
-# License:: Apache License, Version 2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# conf.d/10-mail.conf
-
-default['dovecot']['conf']['mail_location'] = nil
-default['dovecot']['conf']['mail_shared_explicit_inbox'] = nil
-default['dovecot']['conf']['mail_uid'] = nil
-default['dovecot']['conf']['mail_gid'] = nil
-default['dovecot']['conf']['mail_privileged_group'] = nil
-default['dovecot']['conf']['mail_access_groups'] = nil
-default['dovecot']['conf']['mail_full_filesystem_access'] = nil
-default['dovecot']['conf']['mail_attribute_dict'] = nil
-default['dovecot']['conf']['mail_server_comment'] = nil
-default['dovecot']['conf']['mail_server_admin'] = nil
-default['dovecot']['conf']['mmap_disable'] = nil
-default['dovecot']['conf']['dotlock_use_excl'] = nil
-default['dovecot']['conf']['mail_fsync'] = nil
-default['dovecot']['conf']['mail_nfs_storage'] = nil
-default['dovecot']['conf']['mail_nfs_index'] = nil
-default['dovecot']['conf']['lock_method'] = nil
-default['dovecot']['conf']['mail_temp_dir'] = nil
-default['dovecot']['conf']['first_valid_uid'] = nil
-default['dovecot']['conf']['last_valid_uid'] = nil
-default['dovecot']['conf']['first_valid_gid'] = nil
-default['dovecot']['conf']['last_valid_gid'] = nil
-default['dovecot']['conf']['mail_max_keyword_length'] = nil
-default['dovecot']['conf']['valid_chroot_dirs'] = nil
-default['dovecot']['conf']['mail_chroot'] = nil
-default['dovecot']['conf']['auth_socket_path'] = nil
-default['dovecot']['conf']['mail_plugin_dir'] = nil
-default['dovecot']['conf']['mailbox_list_index'] = nil
-default['dovecot']['conf']['mail_cache_min_mail_count'] = nil
-default['dovecot']['conf']['mailbox_idle_check_interval'] = nil
-default['dovecot']['conf']['mail_save_crlf'] = nil
-default['dovecot']['conf']['mail_prefetch_count'] = nil
-default['dovecot']['conf']['mail_temp_scan_interval'] = nil
-default['dovecot']['conf']['maildir_stat_dirs'] = nil
-default['dovecot']['conf']['maildir_copy_with_hardlinks'] = nil
-default['dovecot']['conf']['maildir_very_dirty_syncs'] = nil
-default['dovecot']['conf']['maildir_broken_filename_sizes'] = nil
-default['dovecot']['conf']['maildir_empty_new'] = nil
-default['dovecot']['conf']['mbox_read_locks'] = nil
-default['dovecot']['conf']['mbox_write_locks'] = nil
-default['dovecot']['conf']['mbox_lock_timeout'] = nil
-default['dovecot']['conf']['mbox_dotlock_change_timeout'] = nil
-default['dovecot']['conf']['mbox_dirty_syncs'] = nil
-default['dovecot']['conf']['mbox_very_dirty_syncs'] = nil
-default['dovecot']['conf']['mbox_lazy_writes'] = nil
-default['dovecot']['conf']['mbox_min_index_size'] = nil
-default['dovecot']['conf']['mbox_md5'] = nil
-default['dovecot']['conf']['mdbox_rotate_size'] = nil
-default['dovecot']['conf']['mdbox_rotate_interval'] = nil
-default['dovecot']['conf']['mdbox_preallocate_space'] = nil
-default['dovecot']['conf']['mail_attachment_min_size'] = nil
-default['dovecot']['conf']['mail_attachment_fs'] = nil
-default['dovecot']['conf']['mail_attachment_hash'] = nil
diff --git a/lc-gdn-chef/cookbooks/dovecot/attributes/conf_10_master.rb b/lc-gdn-chef/cookbooks/dovecot/attributes/conf_10_master.rb
deleted file mode 100644
index 15a9341dd8cfb44dbdbea51b21c1743e2e01cf65..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/attributes/conf_10_master.rb
+++ /dev/null
@@ -1,28 +0,0 @@
-# encoding: UTF-8
-#
-# Cookbook Name:: dovecot
-# Attributes:: conf_10_master
-# Author:: Xabier de Zuazo ()
-# Copyright:: Copyright (c) 2014 Onddo Labs, SL.
-# License:: Apache License, Version 2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# conf.d/10-master.conf
-
-default['dovecot']['conf']['default_process_limit'] = nil
-default['dovecot']['conf']['default_client_limit'] = nil
-default['dovecot']['conf']['default_vsz_limit'] = nil
-default['dovecot']['conf']['default_login_user'] = nil
-default['dovecot']['conf']['default_internal_user'] = nil
diff --git a/lc-gdn-chef/cookbooks/dovecot/attributes/conf_10_replication.rb b/lc-gdn-chef/cookbooks/dovecot/attributes/conf_10_replication.rb
deleted file mode 100644
index e83f185535f530a1ad8df803a57f618264d6e6cb..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/attributes/conf_10_replication.rb
+++ /dev/null
@@ -1,24 +0,0 @@
-# encoding: UTF-8
-#
-# Cookbook Name:: dovecot
-# Attributes:: conf_10_replication
-# Author:: Vassilis Aretakis ()
-# License:: Apache License, Version 2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# conf.d/10-replication.conf
-
-default['dovecot']['conf']['doveadm_port'] = nil
-default['dovecot']['conf']['doveadm_password'] = nil
diff --git a/lc-gdn-chef/cookbooks/dovecot/attributes/conf_10_ssl.rb b/lc-gdn-chef/cookbooks/dovecot/attributes/conf_10_ssl.rb
deleted file mode 100644
index c1acc0267b03947e761009b6421d3d273917e47e..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/attributes/conf_10_ssl.rb
+++ /dev/null
@@ -1,76 +0,0 @@
-# encoding: UTF-8
-#
-# Cookbook Name:: dovecot
-# Attributes:: conf_10_ssl
-# Author:: Xabier de Zuazo ()
-# Copyright:: Copyright (c) 2014-2015 Onddo Labs, SL.
-# License:: Apache License, Version 2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# conf.d/10-ssl.conf
-
-default['dovecot']['conf']['ssl'] = \
- if node['platform_family'] == 'suse' && node['platform_version'].to_i < 13
- false
- end
-
-case node['platform_family']
-when 'rhel', 'fedora'
- default['dovecot']['conf']['ssl_cert'] = '= 15.10
- default['dovecot']['conf']['ssl_cert'] = nil
- default['dovecot']['conf']['ssl_key'] = nil
- elsif node['platform_version'].to_f >= 14.04
- default['dovecot']['conf']['ssl_cert'] = '= 8
- default['dovecot']['conf']['ssl_cert'] = nil
- default['dovecot']['conf']['ssl_key'] = nil
- else
- default['dovecot']['conf']['ssl_cert'] = ')
-# Copyright:: Copyright (c) 2014 Onddo Labs, SL.
-# License:: Apache License, Version 2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# conf.d/15-lda.conf
-
-default['dovecot']['conf']['postmaster_address'] = nil
-default['dovecot']['conf']['hostname'] = nil
-default['dovecot']['conf']['quota_full_tempfail'] = nil
-default['dovecot']['conf']['sendmail_path'] = nil
-default['dovecot']['conf']['submission_host'] = nil
-default['dovecot']['conf']['rejection_subject'] = nil
-default['dovecot']['conf']['rejection_reason'] = nil
-default['dovecot']['conf']['recipient_delimiter'] = nil
-default['dovecot']['conf']['lda_original_recipient_header'] = nil
-default['dovecot']['conf']['lda_mailbox_autocreate'] = nil
-default['dovecot']['conf']['lda_mailbox_autosubscribe'] = nil
diff --git a/lc-gdn-chef/cookbooks/dovecot/attributes/conf_20_lmtp.rb b/lc-gdn-chef/cookbooks/dovecot/attributes/conf_20_lmtp.rb
deleted file mode 100644
index ed5837befbfa9b38ae978c0a410aa2b94965c3b5..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/attributes/conf_20_lmtp.rb
+++ /dev/null
@@ -1,28 +0,0 @@
-# encoding: UTF-8
-#
-# Cookbook Name:: dovecot
-# Attributes:: conf_20_lmtp
-# Author:: Xabier de Zuazo ()
-# Copyright:: Copyright (c) 2016 Xabier de Zuazo
-# Copyright:: Copyright (c) 2014-2015 Onddo Labs, SL.
-# License:: Apache License, Version 2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# conf.d/20-lmtp.conf
-
-default['dovecot']['conf']['lmtp_proxy'] = nil
-default['dovecot']['conf']['lmtp_save_to_detail_mailbox'] = nil
-default['dovecot']['conf']['lmtp_rcpt_check_quota'] = nil
-default['dovecot']['conf']['lmtp_hdr_delivery_address'] = nil
diff --git a/lc-gdn-chef/cookbooks/dovecot/attributes/conf_dovecot.rb b/lc-gdn-chef/cookbooks/dovecot/attributes/conf_dovecot.rb
deleted file mode 100644
index 784bc02b22f2ad533fe453b8bafbd6dc4ce7653a..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/attributes/conf_dovecot.rb
+++ /dev/null
@@ -1,36 +0,0 @@
-# encoding: UTF-8
-#
-# Cookbook Name:: dovecot
-# Attributes:: conf_dovecot
-# Author:: Xabier de Zuazo ()
-# Copyright:: Copyright (c) 2014-2015 Onddo Labs, SL.
-# License:: Apache License, Version 2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# dovecot.conf
-
-default['dovecot']['conf']['listen'] = nil
-default['dovecot']['conf']['base_dir'] = nil
-default['dovecot']['conf']['instance_name'] = nil
-default['dovecot']['conf']['login_greeting'] = nil
-default['dovecot']['conf']['login_trusted_networks'] = nil
-default['dovecot']['conf']['login_access_sockets'] = nil
-default['dovecot']['conf']['auth_proxy_self'] = nil
-default['dovecot']['conf']['verbose_proctitle'] = nil
-default['dovecot']['conf']['shutdown_clients'] = nil
-default['dovecot']['conf']['doveadm_worker_count'] = nil
-default['dovecot']['conf']['doveadm_socket_path'] = nil
-default['dovecot']['conf']['import_environment'] = nil
-default['dovecot']['conf']['dict'] = nil
diff --git a/lc-gdn-chef/cookbooks/dovecot/attributes/conf_dovecot_db.rb b/lc-gdn-chef/cookbooks/dovecot/attributes/conf_dovecot_db.rb
deleted file mode 100644
index 9d5d59f378f5643266dcf41423278b6ff4a9469d..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/attributes/conf_dovecot_db.rb
+++ /dev/null
@@ -1,24 +0,0 @@
-# encoding: UTF-8
-#
-# Cookbook Name:: dovecot
-# Attributes:: conf_dovecot_db
-# Author:: Xabier de Zuazo ()
-# Copyright:: Copyright (c) 2014 Onddo Labs, SL.
-# License:: Apache License, Version 2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# dovecot-db.conf.ext
-
-default['dovecot']['conf']['db'] = nil
diff --git a/lc-gdn-chef/cookbooks/dovecot/attributes/conf_dovecot_dict_auth.rb b/lc-gdn-chef/cookbooks/dovecot/attributes/conf_dovecot_dict_auth.rb
deleted file mode 100644
index e7dfd066f9ab6026820a9c5163df8aba87a59310..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/attributes/conf_dovecot_dict_auth.rb
+++ /dev/null
@@ -1,30 +0,0 @@
-# encoding: UTF-8
-#
-# Cookbook Name:: dovecot
-# Attributes:: conf_dovecot_dict_auth
-# Author:: Xabier de Zuazo ()
-# Copyright:: Copyright (c) 2014 Onddo Labs, SL.
-# License:: Apache License, Version 2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# dovecot-dict-auth.conf.ext
-
-default['dovecot']['conf']['dict_auth']['uri'] = nil
-default['dovecot']['conf']['dict_auth']['password_key'] = nil
-default['dovecot']['conf']['dict_auth']['user_key'] = nil
-default['dovecot']['conf']['dict_auth']['value_format'] = nil
-default['dovecot']['conf']['dict_auth']['iterate_prefix'] = nil
-default['dovecot']['conf']['dict_auth']['iterate_disable'] = nil
-default['dovecot']['conf']['dict_auth']['default_pass_scheme'] = nil
diff --git a/lc-gdn-chef/cookbooks/dovecot/attributes/conf_dovecot_dict_sql.rb b/lc-gdn-chef/cookbooks/dovecot/attributes/conf_dovecot_dict_sql.rb
deleted file mode 100644
index 9b87baa75afd36df29bec65525135af9a274a484..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/attributes/conf_dovecot_dict_sql.rb
+++ /dev/null
@@ -1,24 +0,0 @@
-# encoding: UTF-8
-#
-# Cookbook Name:: dovecot
-# Attributes:: conf_dovecot_dict_sql
-# Author:: Xabier de Zuazo ()
-# Copyright:: Copyright (c) 2014 Onddo Labs, SL.
-# License:: Apache License, Version 2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# dovecot-dict-sql.conf.ext
-
-default['dovecot']['conf']['dict_sql']['connect'] = nil
diff --git a/lc-gdn-chef/cookbooks/dovecot/attributes/conf_dovecot_ldap.rb b/lc-gdn-chef/cookbooks/dovecot/attributes/conf_dovecot_ldap.rb
deleted file mode 100644
index a8d56522ea149484d2cd060958ccad73125ea3a9..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/attributes/conf_dovecot_ldap.rb
+++ /dev/null
@@ -1,54 +0,0 @@
-# encoding: UTF-8
-#
-# Cookbook Name:: dovecot
-# Attributes:: conf_dovecot_ldap
-# Author:: Xabier de Zuazo ()
-# Copyright:: Copyright (c) 2014-2015 Onddo Labs, SL.
-# License:: Apache License, Version 2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# dovecot-ldap.conf.ext
-
-default['dovecot']['conf']['ldap']['hosts'] = nil
-default['dovecot']['conf']['ldap']['uris'] = nil
-default['dovecot']['conf']['ldap']['dn'] = nil
-default['dovecot']['conf']['ldap']['dnpass'] = nil
-default['dovecot']['conf']['ldap']['sasl_bind'] = nil
-default['dovecot']['conf']['ldap']['sasl_mech'] = nil
-default['dovecot']['conf']['ldap']['sasl_realm'] = nil
-default['dovecot']['conf']['ldap']['sasl_authz_id'] = nil
-default['dovecot']['conf']['ldap']['tls'] = nil
-default['dovecot']['conf']['ldap']['tls_ca_cert_file'] = nil
-default['dovecot']['conf']['ldap']['tls_ca_cert_dir'] = nil
-default['dovecot']['conf']['ldap']['tls_cipher_suite'] = nil
-default['dovecot']['conf']['ldap']['tls_cert_file'] = nil
-default['dovecot']['conf']['ldap']['tls_key_file'] = nil
-default['dovecot']['conf']['ldap']['tls_require_cert'] = nil
-default['dovecot']['conf']['ldap']['ldaprc_path'] = nil
-default['dovecot']['conf']['ldap']['debug_level'] = nil
-default['dovecot']['conf']['ldap']['auth_bind'] = nil
-default['dovecot']['conf']['ldap']['auth_bind_userdn'] = nil
-default['dovecot']['conf']['ldap']['ldap_version'] = nil
-default['dovecot']['conf']['ldap']['base'] = ''
-default['dovecot']['conf']['ldap']['deref'] = nil
-default['dovecot']['conf']['ldap']['scope'] = nil
-default['dovecot']['conf']['ldap']['user_attrs'] = nil
-default['dovecot']['conf']['ldap']['user_filter'] = nil
-default['dovecot']['conf']['ldap']['pass_attrs'] = nil
-default['dovecot']['conf']['ldap']['pass_filter'] = nil
-default['dovecot']['conf']['ldap']['iterate_attrs'] = nil
-default['dovecot']['conf']['ldap']['iterate_filter'] = nil
-default['dovecot']['conf']['ldap']['default_pass_scheme'] = nil
diff --git a/lc-gdn-chef/cookbooks/dovecot/attributes/conf_dovecot_sql.rb b/lc-gdn-chef/cookbooks/dovecot/attributes/conf_dovecot_sql.rb
deleted file mode 100644
index 86818424184f05a06ccdbf377884408c6a2159aa..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/attributes/conf_dovecot_sql.rb
+++ /dev/null
@@ -1,29 +0,0 @@
-# encoding: UTF-8
-#
-# Cookbook Name:: dovecot
-# Attributes:: conf_dovecot_sql
-# Author:: Xabier de Zuazo ()
-# Copyright:: Copyright (c) 2014 Onddo Labs, SL.
-# License:: Apache License, Version 2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# dovecot-sql.conf.ext
-
-default['dovecot']['conf']['sql']['driver'] = nil
-default['dovecot']['conf']['sql']['connect'] = nil
-default['dovecot']['conf']['sql']['default_pass_scheme'] = nil
-default['dovecot']['conf']['sql']['password_query'] = nil
-default['dovecot']['conf']['sql']['user_query'] = nil
-default['dovecot']['conf']['sql']['iterate_query'] = nil
diff --git a/lc-gdn-chef/cookbooks/dovecot/attributes/conf_files.rb b/lc-gdn-chef/cookbooks/dovecot/attributes/conf_files.rb
deleted file mode 100644
index ff9c0b774617f0263b0399f8ed7ba8ad4dfea3a4..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/attributes/conf_files.rb
+++ /dev/null
@@ -1,79 +0,0 @@
-# encoding: UTF-8
-#
-# Cookbook Name:: dovecot
-# Attributes:: conf_files
-# Author:: Xabier de Zuazo ()
-# Copyright:: Copyright (c) 2013-2014 Onddo Labs, SL.
-# License:: Apache License, Version 2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-default['dovecot']['conf_path'] = '/etc/dovecot'
-default['dovecot']['conf_files_user'] = 'root'
-default['dovecot']['conf_files_group'] = node['dovecot']['group']
-default['dovecot']['conf_files_mode'] = '00644'
-default['dovecot']['conf']['password_file'] =
- "#{node['dovecot']['conf_path']}/password"
-
-default['dovecot']['sensitive_files'] = %w(
- *.conf.ext
-)
-default['dovecot']['sensitive_files_mode'] = '00640'
-
-default['dovecot']['conf_files']['core'] = %w(
- conf.d/10-auth.conf
- conf.d/10-director.conf
- conf.d/10-logging.conf
- conf.d/10-mail.conf
- conf.d/10-master.conf
- conf.d/10-ssl.conf
- conf.d/10-tcpwrapper.conf
- conf.d/15-lda.conf
- conf.d/15-mailboxes.conf
- conf.d/15-replication.conf
- conf.d/90-acl.conf
- conf.d/90-plugin.conf
- conf.d/90-quota.conf
- conf.d/auth-checkpassword.conf.ext
- conf.d/auth-deny.conf.ext
- conf.d/auth-dict.conf.ext
- conf.d/auth-master.conf.ext
- conf.d/auth-passwdfile.conf.ext
- conf.d/auth-sql.conf.ext
- conf.d/auth-static.conf.ext
- conf.d/auth-system.conf.ext
- conf.d/auth-vpopmail.conf.ext
- dovecot.conf
- dovecot-db.conf.ext
- dovecot-dict-auth.conf.ext
- dovecot-dict-sql.conf.ext
- dovecot-sql.conf.ext
-)
-default['dovecot']['conf_files']['imap'] = %w(
- conf.d/20-imap.conf
-)
-default['dovecot']['conf_files']['pop3'] = %w(
- conf.d/20-pop3.conf
-)
-default['dovecot']['conf_files']['lmtp'] = %w(
- conf.d/20-lmtp.conf
-)
-default['dovecot']['conf_files']['sieve'] = %w(
- conf.d/20-managesieve.conf
- conf.d/90-sieve.conf
-)
-default['dovecot']['conf_files']['ldap'] = %w(
- dovecot-ldap.conf.ext
- conf.d/auth-ldap.conf.ext
-)
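
The `sensitive_files` entries above are shell-style globs applied to the managed configuration files. A minimal standalone sketch (not the cookbook's recipe code; file list and mode handling are illustrative assumptions) of how such globs can select the files that get the stricter `sensitive_files_mode`:

```ruby
# Hypothetical sketch (not the cookbook's recipe code): applying the
# sensitive_files globs to a list of managed configuration files. With the
# default fnmatch flags, '*' also spans directory separators.
sensitive_globs = %w(*.conf.ext)
conf_files = %w(
  dovecot.conf
  dovecot-sql.conf.ext
  conf.d/10-auth.conf
  conf.d/auth-sql.conf.ext
)

conf_files.each do |path|
  sensitive = sensitive_globs.any? { |glob| File.fnmatch(glob, path) }
  puts "#{path} -> mode #{sensitive ? '00640' : '00644'}"
end
# dovecot.conf -> mode 00644
# dovecot-sql.conf.ext -> mode 00640
# conf.d/10-auth.conf -> mode 00644
# conf.d/auth-sql.conf.ext -> mode 00640
```
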
diff --git a/lc-gdn-chef/cookbooks/dovecot/attributes/create_pwfile.rb b/lc-gdn-chef/cookbooks/dovecot/attributes/create_pwfile.rb
deleted file mode 100644
index a822e24fbef389f0b75e663d43cdb1698ccad6b7..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/attributes/create_pwfile.rb
+++ /dev/null
@@ -1,23 +0,0 @@
-# encoding: UTF-8
-#
-# Cookbook Name:: dovecot
-# Attributes:: create_pwfile
-# Author:: Xabier de Zuazo ()
-# Copyright:: Copyright (c) 2017 Xabier de Zuazo
-# License:: Apache License, Version 2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-default['dovecot']['databag_name'] = 'dovecot'
-default['dovecot']['databag_users_item'] = 'users'
diff --git a/lc-gdn-chef/cookbooks/dovecot/attributes/default.rb b/lc-gdn-chef/cookbooks/dovecot/attributes/default.rb
deleted file mode 100644
index d06934cd4cf437390d79db38564c8df78a9c1a1c..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/attributes/default.rb
+++ /dev/null
@@ -1,36 +0,0 @@
-# encoding: UTF-8
-#
-# Cookbook Name:: dovecot
-# Attributes:: default
-# Author:: Xabier de Zuazo ()
-# Copyright:: Copyright (c) 2013-2014 Onddo Labs, SL.
-# License:: Apache License, Version 2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-default['dovecot']['install_from'] = 'package'
-
-case node['platform_family']
-when 'rhel', 'fedora'
- default['dovecot']['lib_path'] = '/usr/libexec/dovecot'
-when 'suse'
- default['dovecot']['lib_path'] = '/var/run/dovecot'
-# when 'debian'
-else
- default['dovecot']['lib_path'] = '/usr/lib/dovecot'
-end
-
-default['dovecot']['user'] = 'dovecot'
-default['dovecot']['group'] = node['dovecot']['user']
-default['dovecot']['user_homedir'] = node['dovecot']['lib_path']
diff --git a/lc-gdn-chef/cookbooks/dovecot/attributes/namespaces.rb b/lc-gdn-chef/cookbooks/dovecot/attributes/namespaces.rb
deleted file mode 100644
index 00e944bc347dac539c6c932595e877cf5c7e0086..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/attributes/namespaces.rb
+++ /dev/null
@@ -1,22 +0,0 @@
-# encoding: UTF-8
-#
-# Cookbook Name:: dovecot
-# Attributes:: namespaces
-# Author:: Xabier de Zuazo ()
-# Copyright:: Copyright (c) 2013-2014 Onddo Labs, SL.
-# License:: Apache License, Version 2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-default['dovecot']['namespaces'] = []
diff --git a/lc-gdn-chef/cookbooks/dovecot/attributes/ohai_plugin.rb b/lc-gdn-chef/cookbooks/dovecot/attributes/ohai_plugin.rb
deleted file mode 100644
index 5b7d39a6e84a9ad788791a174a89ba8a2c134ac5..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/attributes/ohai_plugin.rb
+++ /dev/null
@@ -1,22 +0,0 @@
-# encoding: UTF-8
-#
-# Cookbook Name:: dovecot
-# Attributes:: ohai_plugin
-# Author:: Xabier de Zuazo ()
-# Copyright:: Copyright (c) 2013-2014 Onddo Labs, SL.
-# License:: Apache License, Version 2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-default['dovecot']['ohai_plugin']['build-options'] = true
diff --git a/lc-gdn-chef/cookbooks/dovecot/attributes/packages.rb b/lc-gdn-chef/cookbooks/dovecot/attributes/packages.rb
deleted file mode 100644
index be831b027630122a751a924b8c286f6605684ea0..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/attributes/packages.rb
+++ /dev/null
@@ -1,64 +0,0 @@
-# encoding: UTF-8
-#
-# Cookbook Name:: dovecot
-# Attributes:: packages
-# Author:: Xabier de Zuazo ()
-# Copyright:: Copyright (c) 2014 Onddo Labs, SL.
-# License:: Apache License, Version 2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-case node['platform_family']
-when 'rhel', 'fedora'
- default['dovecot']['packages']['core'] = %w(dovecot)
- default['dovecot']['packages']['imap'] = [] # included inside core
- default['dovecot']['packages']['pop3'] = [] # included inside core
- default['dovecot']['packages']['lmtp'] = [] # included inside core
- default['dovecot']['packages']['sieve'] = %w(dovecot-pigeonhole)
- default['dovecot']['packages']['ldap'] = [] # included inside core
- default['dovecot']['packages']['sqlite'] = [] # included inside core
- default['dovecot']['packages']['mysql'] = %w(dovecot-mysql)
- default['dovecot']['packages']['pgsql'] = %w(dovecot-pgsql)
-when 'suse'
- default['dovecot']['packages']['core'] = %w(dovecot)
- default['dovecot']['packages']['imap'] = [] # included inside core
- default['dovecot']['packages']['pop3'] = [] # included inside core
- default['dovecot']['packages']['lmtp'] = [] # included inside core
- default['dovecot']['packages']['sieve'] = [] # included inside core
- default['dovecot']['packages']['ldap'] = [] # included inside core
- default['dovecot']['packages']['sqlite'] = %w(dovecot-backend-sqlite)
- default['dovecot']['packages']['mysql'] = %w(dovecot-backend-mysql)
- default['dovecot']['packages']['pgsql'] = %w(dovecot-backend-pgsql)
-when 'arch' # not tested
- default['dovecot']['packages']['core'] = %w(dovecot)
- default['dovecot']['packages']['imap'] = [] # included inside core
- default['dovecot']['packages']['pop3'] = [] # included inside core
- default['dovecot']['packages']['lmtp'] = [] # included inside core
- default['dovecot']['packages']['sieve'] = %w(pigeonhole)
- default['dovecot']['packages']['ldap'] = [] # included inside core
- default['dovecot']['packages']['sqlite'] = [] # included inside core
- default['dovecot']['packages']['mysql'] = [] # included inside core
- default['dovecot']['packages']['pgsql'] = [] # included inside core
-else # when 'debian'
- default['dovecot']['packages']['core'] = %w(dovecot-core dovecot-gssapi)
- default['dovecot']['packages']['imap'] = %w(dovecot-imapd)
- default['dovecot']['packages']['pop3'] = %w(dovecot-pop3d)
- default['dovecot']['packages']['lmtp'] = %w(dovecot-lmtpd)
- default['dovecot']['packages']['sieve'] =
- %w(dovecot-sieve dovecot-managesieved)
- default['dovecot']['packages']['ldap'] = %w(dovecot-ldap)
- default['dovecot']['packages']['sqlite'] = %w(dovecot-sqlite)
- default['dovecot']['packages']['mysql'] = %w(dovecot-mysql)
- default['dovecot']['packages']['pgsql'] = %w(dovecot-pgsql)
-end
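
The per-component package lists above are consumed elsewhere in the cookbook. As a rough, hypothetical illustration (plain Ruby, not the actual recipe), flattening them for a set of enabled components looks like this:

```ruby
# Hypothetical sketch (plain Ruby, not the cookbook's recipe): flattening the
# per-component package lists for the components a node enables.
packages = {
  'core'  => %w(dovecot-core dovecot-gssapi),
  'imap'  => %w(dovecot-imapd),
  'sieve' => %w(dovecot-sieve dovecot-managesieved),
  'mysql' => %w(dovecot-mysql)
}

enabled = %w(core imap sieve) # assumed to come from the node's configuration
to_install = enabled.flat_map { |component| packages.fetch(component, []) }.uniq

p to_install
# => ["dovecot-core", "dovecot-gssapi", "dovecot-imapd",
#     "dovecot-sieve", "dovecot-managesieved"]
```
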
diff --git a/lc-gdn-chef/cookbooks/dovecot/attributes/plugins.rb b/lc-gdn-chef/cookbooks/dovecot/attributes/plugins.rb
deleted file mode 100644
index adacb7c6a66b31ee5a34c65f73d901b1e9d77637..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/attributes/plugins.rb
+++ /dev/null
@@ -1,28 +0,0 @@
-# encoding: UTF-8
-#
-# Cookbook Name:: dovecot
-# Attributes:: plugins
-# Author:: Xabier de Zuazo ()
-# Copyright:: Copyright (c) 2013-2014 Onddo Labs, SL.
-# License:: Apache License, Version 2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-default['dovecot']['conf']['mail_plugins'] = []
-
-default['dovecot']['plugins']['sieve'] =
- {
- 'sieve' => '~/.dovecot.sieve',
- 'sieve_dir' => '~/sieve'
- }
diff --git a/lc-gdn-chef/cookbooks/dovecot/attributes/protocols.rb b/lc-gdn-chef/cookbooks/dovecot/attributes/protocols.rb
deleted file mode 100644
index b4ec27241b319e9102ba2816b4301d1acc5ac2c7..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/attributes/protocols.rb
+++ /dev/null
@@ -1,22 +0,0 @@
-# encoding: UTF-8
-#
-# Cookbook Name:: dovecot
-# Attributes:: protocols
-# Author:: Xabier de Zuazo ()
-# Copyright:: Copyright (c) 2013-2014 Onddo Labs, SL.
-# License:: Apache License, Version 2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-default['dovecot']['protocols'] = {}
diff --git a/lc-gdn-chef/cookbooks/dovecot/attributes/service.rb b/lc-gdn-chef/cookbooks/dovecot/attributes/service.rb
deleted file mode 100644
index 5826dbe22858066c45f482c7c461e33532a5f8ac..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/attributes/service.rb
+++ /dev/null
@@ -1,41 +0,0 @@
-# encoding: UTF-8
-#
-# Cookbook Name:: dovecot
-# Attributes:: service
-# Author:: Xabier de Zuazo ()
-# Copyright:: Copyright (c) 2015 Xabier de Zuazo
-# License:: Apache License, Version 2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-default['dovecot']['service']['name'] = 'dovecot'
-
-default['dovecot']['service']['supports'] =
- if node['platform_family'] == 'suse' && node['platform_version'].to_i < 13
- { restart: true, reload: false, status: true }
- else
- { restart: true, reload: true, status: true }
- end
-
-default['dovecot']['service']['provider'] =
- if node['platform'] == 'ubuntu' &&
- Gem::Requirement.new(['>= 13.10', '< 15'])
- .satisfied_by?(Gem::Version.new(node['platform_version']))
- Chef::Provider::Service::Upstart
- elsif (node['platform'] == 'debian' && node['platform_version'].to_i >= 8) ||
- (node['platform'] == 'ubuntu' && node['platform_version'].to_i >= 15)
- Chef::Provider::Service::Debian
- elsif node['platform_family'] == 'suse' && node['platform_version'].to_i == 12
- Chef::Provider::Service::Redhat
- end
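
The Ubuntu branch above relies on RubyGems version objects, which ship with Ruby's standard library. A small runnable sketch of how that version gate evaluates (the version strings are illustrative):

```ruby
# Runnable sketch of the Gem::Requirement gate used for the Upstart provider.
upstart_range = Gem::Requirement.new(['>= 13.10', '< 15'])

%w(12.04 14.04 16.04).each do |version|
  upstart = upstart_range.satisfied_by?(Gem::Version.new(version))
  puts "Ubuntu #{version}: #{upstart ? 'Upstart provider' : 'other provider'}"
end
# Ubuntu 12.04: other provider
# Ubuntu 14.04: Upstart provider
# Ubuntu 16.04: other provider
```
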
diff --git a/lc-gdn-chef/cookbooks/dovecot/attributes/services.rb b/lc-gdn-chef/cookbooks/dovecot/attributes/services.rb
deleted file mode 100644
index 84d5815e5e52d0f14731772fce5d4fe2848ccd20..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/attributes/services.rb
+++ /dev/null
@@ -1,22 +0,0 @@
-# encoding: UTF-8
-#
-# Cookbook Name:: dovecot
-# Attributes:: services
-# Author:: Xabier de Zuazo ()
-# Copyright:: Copyright (c) 2013-2014 Onddo Labs, SL.
-# License:: Apache License, Version 2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-default['dovecot']['services'] = {}
diff --git a/lc-gdn-chef/cookbooks/dovecot/libraries/auth.rb b/lc-gdn-chef/cookbooks/dovecot/libraries/auth.rb
deleted file mode 100644
index 5b22629cd9e07738d53a5f18c4f7335a566c966a..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/libraries/auth.rb
+++ /dev/null
@@ -1,38 +0,0 @@
-# encoding: UTF-8
-#
-# Cookbook Name:: dovecot
-# Library:: auth
-# Author:: Xabier de Zuazo ()
-# Copyright:: Copyright (c) 2013-2014 Onddo Labs, SL.
-# License:: Apache License, Version 2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-module DovecotCookbook
-  # Helper module to check if the configuration contains a valid userdb or passdb
- module Auth
- def self.authdb?(type, auth)
- auth.is_a?(Hash) && !auth.empty? &&
- (auth[type].is_a?(Hash) || auth[type].is_a?(Array))
- end
-
- def self.passdb?(auth)
- authdb?('passdb', auth)
- end
-
- def self.userdb?(auth)
- authdb?('userdb', auth)
- end
- end
-end
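
A minimal usage sketch of the two predicates, assuming the snippet is saved next to this `auth.rb` and run with plain Ruby:

```ruby
# Hypothetical usage sketch for DovecotCookbook::Auth (not part of the cookbook).
require_relative 'auth'

auth = {
  'passdb' => { 'driver' => 'pam' }, # a Hash counts as a configured passdb
  'userdb' => []                     # an Array counts as well
}

puts DovecotCookbook::Auth.passdb?(auth) # => true
puts DovecotCookbook::Auth.userdb?(auth) # => true
puts DovecotCookbook::Auth.passdb?({})   # => false (empty configuration)
puts DovecotCookbook::Auth.userdb?(nil)  # => false (not a Hash at all)
```
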
diff --git a/lc-gdn-chef/cookbooks/dovecot/libraries/conf.rb b/lc-gdn-chef/cookbooks/dovecot/libraries/conf.rb
deleted file mode 100644
index 718cc8d0fa4ca1d02ff1d27f96f3e168c431503c..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/libraries/conf.rb
+++ /dev/null
@@ -1,103 +0,0 @@
-# encoding: UTF-8
-#
-# Cookbook Name:: dovecot
-# Library:: conf
-# Author:: Xabier de Zuazo ()
-# Copyright:: Copyright (c) 2013-2015 Onddo Labs, SL.
-# License:: Apache License, Version 2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-require 'erubis'
-
-module DovecotCookbook
- # Helper module to generate configuration structures
- module Conf
- def self.name(str)
- str =~ /\s/ ? "\"#{str}\"" : str
- end
-
- def self.value(v, default = nil)
- case v
- when nil then default.to_s
- when true then 'yes'
- when false then 'no'
- when Array then v.join(' ')
- else
- v.to_s
- end
- end
-
- def self.attribute(conf, k, default = nil)
- v = conf[k]
- v.nil? ? "##{k} = #{value(default)}" : "#{k} = #{value(v)}"
- end
-
- def self.evaluate_template(template, context)
- context = context.merge(dovecot_conf: DovecotCookbook::Conf)
- eruby = Erubis::Eruby.new(template)
- eruby.evaluate(context)
- end
-
- def self.protocols(conf)
- # dovecot: config: Fatal: Error in configuration file
- # /etc/dovecot/dovecot.conf: protocols: Unknown protocol: lda
- ignore_protos = %w(lda)
- protos = DovecotCookbook::Protocols.list(conf) - ignore_protos
- protos.empty? ? 'none' : protos.join(' ')
- end
-
- def self.authdb(driver, type, conf)
- template = DovecotCookbook::Conf::Templates::AUTHDB
- evaluate_template(template, driver: driver, type: type, conf: conf)
- end
-
- def self.plugin(_name, conf)
- template = DovecotCookbook::Conf::Templates::PLUGIN
- evaluate_template(template, conf: conf)
- end
-
- def self.namespace(ns)
- template = DovecotCookbook::Conf::Templates::NAMESPACE
- evaluate_template(template, ns: ns)
- end
-
- def self.protocol(name, conf)
- template = DovecotCookbook::Conf::Templates::PROTOCOL
- evaluate_template(template, name: name, conf: conf)
- end
-
- def self.service(name, conf)
- template = DovecotCookbook::Conf::Templates::SERVICE
- evaluate_template(template, name: name, conf: conf)
- end
-
- def self.map(map)
- template = DovecotCookbook::Conf::Templates::MAP
- evaluate_template(template, map: map)
- end
-
- def self.require?(req, conf)
- case req
- when 'core' then true
- when 'imap', 'pop3', 'lmtp' then Conf::Require.protocol?(req, conf)
- when 'sieve' then Conf::Require.plugin?('sieve', conf)
- when 'ldap' then Conf::Require.ldap?(conf['conf'])
- when 'sqlite', 'mysql', 'pgsql' then Conf::Require.db?(req, conf['conf'])
- else
- raise "Unknown configuration requirement: #{req.inspect}"
- end
- end
- end
-end
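
A short sketch of how the low-level rendering helpers behave, assuming the `erubis` gem is installed (conf.rb requires it at load time) and the snippet sits next to the deleted `libraries/conf.rb`: booleans become `yes`/`no`, arrays are space-joined, and `nil` values render as commented-out defaults.

```ruby
# Hypothetical rendering sketch; assumes the erubis gem is installed and this
# snippet sits next to the deleted libraries/conf.rb.
require_relative 'conf'

dc = DovecotCookbook::Conf

puts dc.value(true)               # => "yes"
puts dc.value(false)              # => "no"
puts dc.value(%w(imap pop3 lmtp)) # => "imap pop3 lmtp"

conf = { 'listen' => '*, ::', 'base_dir' => nil }
puts dc.attribute(conf, 'listen')                       # => "listen = *, ::"
puts dc.attribute(conf, 'base_dir', '/var/run/dovecot')
# => "#base_dir = /var/run/dovecot" (nil renders as a commented-out default)
```
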
diff --git a/lc-gdn-chef/cookbooks/dovecot/libraries/conf_require.rb b/lc-gdn-chef/cookbooks/dovecot/libraries/conf_require.rb
deleted file mode 100644
index a527fda90308fb83dbe21638cec537290aa114a5..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/libraries/conf_require.rb
+++ /dev/null
@@ -1,45 +0,0 @@
-# encoding: UTF-8
-#
-# Cookbook Name:: dovecot
-# Library:: conf_require
-# Author:: Xabier de Zuazo ()
-# Copyright:: Copyright (c) 2014-2015 Onddo Labs, SL.
-# License:: Apache License, Version 2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-require 'erubis'
-
-module DovecotCookbook
- module Conf
- # Helper module to check configuration requirements
- module Require
- def self.protocol?(proto, gconf)
- DovecotCookbook::Protocols.enabled?(proto, gconf['protocols'])
- end
-
- def self.plugin?(plugin, gconf)
- DovecotCookbook::Plugins.required?(plugin, gconf)
- end
-
- def self.ldap?(conf)
- [true, false].include?(conf['ldap']['auth_bind'])
- end
-
- def self.db?(db, conf)
- conf['sql']['driver'] == db
- end
- end
- end
-end
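
The `Require` predicates operate on plain attribute hashes. A hypothetical sketch (same assumptions as above: `erubis` installed, snippet placed next to the deleted library files):

```ruby
# Hypothetical sketch of the Require predicates over plain attribute hashes;
# assumes erubis is installed and protocols.rb/conf_require.rb sit alongside.
require_relative 'protocols'
require_relative 'conf_require'

conf = {
  'sql'  => { 'driver' => 'mysql' },
  'ldap' => { 'auth_bind' => true }
}

puts DovecotCookbook::Conf::Require.db?('mysql', conf) # => true
puts DovecotCookbook::Conf::Require.db?('pgsql', conf) # => false
puts DovecotCookbook::Conf::Require.ldap?(conf)        # => true

gconf = { 'protocols' => { 'imap' => {}, 'lda' => nil } }
puts DovecotCookbook::Conf::Require.protocol?('imap', gconf) # => true
puts DovecotCookbook::Conf::Require.protocol?('pop3', gconf) # => false
```
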
diff --git a/lc-gdn-chef/cookbooks/dovecot/libraries/conf_templates.rb b/lc-gdn-chef/cookbooks/dovecot/libraries/conf_templates.rb
deleted file mode 100644
index 7d2ea387cfc89bda81ddab32257f78ddda588c00..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/libraries/conf_templates.rb
+++ /dev/null
@@ -1,150 +0,0 @@
-# encoding: UTF-8
-#
-# Cookbook Name:: dovecot
-# Library:: conf_templates
-# Author:: Xabier de Zuazo ()
-# Copyright:: Copyright (c) 2014 Onddo Labs, SL.
-# License:: Apache License, Version 2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-require 'erubis'
-
-module DovecotCookbook
- module Conf
- # Configuration structure templates
- module Templates
- AUTHDB = <<-EOT
-<% confs = @conf.is_a?(Array)? @conf : [@conf]
- confs.each do |conf| -%>
-<%= @dovecot_conf.name(@type) %> {
- <% unless conf.key?('driver') -%>
- driver = <%= @driver %>
- <% end -%>
- <% conf.sort.each do |key, value|
- unless value.nil?
- -%>
- <%= key %> = <%= @dovecot_conf.value(value) %>
- <% end
- end
- -%>
-}
-<% end -%>
-EOT
- .freeze unless defined?(Conf::Templates::AUTHDB)
-
- PLUGIN = <<-EOT
-plugin {
- <% @conf.sort.each do |key, value|
- unless value.nil?
- -%>
- <%= key %> = <%= @dovecot_conf.value(value) %>
- <% end
- end -%>
-}
-EOT
- .freeze unless defined?(Conf::Templates::PLUGIN)
-
- NAMESPACE = <<-EOT
-namespace <%= @dovecot_conf.name(@ns['name']) %> {
- <% if @ns['mailboxes'].is_a?(Array) || @ns['mailboxes'].is_a?(Hash)
- mailboxes =
- if @ns['mailboxes'].is_a?(Array)
- @ns['mailboxes']
- else
- [@ns['mailboxes']]
- end
- mailboxes.each do |mailbox|
- mailbox.sort.each do |key, values|
- -%>
- mailbox <%= @dovecot_conf.name(key) %> {
- <% values.sort.each do |key, value| -%>
- <%= key %> = <%= @dovecot_conf.value(value) %>
- <% end -%>
- }
- <% end -%>
- <% end -%>
- <% end -%>
- <% @ns.sort.each do |key, value|
- next if key == 'mailboxes'
- -%>
- <%= key %> = <%= @dovecot_conf.value(value) %>
- <% end -%>
-}
-EOT
- .freeze unless defined?(Conf::Templates::NAMESPACE)
-
- PROTOCOL = <<-EOT
-protocol <%= @dovecot_conf.name(@name) %> {
- <% @conf.sort.each do |key, value| -%>
- <%= key %> = <%= @dovecot_conf.value(value) %>
- <% end -%>
-}
-EOT
- .freeze unless defined?(Conf::Templates::PROTOCOL)
-
- SERVICE = <<-EOT
-service <%= @dovecot_conf.name(@name) %> {
- <% if @conf['listeners'].is_a?(Array) || @conf['listeners'].is_a?(Hash)
- listeners =
- if @conf['listeners'].is_a?(Array)
- @conf['listeners']
- else
- [@conf['listeners']]
- end
- listeners.each do |listener|
- listener.sort.each do |service, values|
- service_proto = service.split(':')[0]
- service_name = service.split(':')[1]
- -%>
- <%= service_proto %>_listener <%= @dovecot_conf.name(service_name) %> {
- <% values.sort.each do |key, value|-%>
- <%= key %> = <%= @dovecot_conf.value(value) %>
- <% end -%>
- }
- <% end -%>
- <% end -%>
- <% end -%>
- <% @conf.sort.each do |key, value|
- next if key == 'listeners'
- -%>
- <%= key %> = <%= @dovecot_conf.value(value) %>
- <% end -%>
-}
-EOT
- .freeze unless defined?(Conf::Templates::SERVICE)
-
- MAP = <<-EOT
-map {
-<% @map.sort.each do |k, v|
- if v.is_a?(Hash)
--%>
- <%= @dovecot_conf.name(k) %> {
-<%
- v.sort.each do |k2, v2|
--%>
- <%= k2 %> = <%= @dovecot_conf.value(v2) %>
-<% end -%>
- }
-<% else -%>
- <%= k %> = <%= @dovecot_conf.value(v) %>
-<% end
- end
--%>
-}
-EOT
- .freeze unless defined?(Conf::Templates::MAP)
- end
- end
-end
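
Each constant above is an Erubis template string evaluated through `Conf.evaluate_template`. A hedged sketch of rendering the `PLUGIN` template outside Chef, assuming `erubis` is installed and the snippet sits next to the deleted `conf.rb` and `conf_templates.rb`:

```ruby
# Hypothetical sketch: rendering the PLUGIN template via Conf.evaluate_template.
# Assumes erubis is installed and conf.rb/conf_templates.rb sit alongside.
require_relative 'conf'
require_relative 'conf_templates'

quota_conf = {
  'quota'      => 'maildir:User quota',
  'quota_rule' => '*:storage=1G',
  'unused'     => nil # nil values are skipped by the template
}

puts DovecotCookbook::Conf.plugin('quota', quota_conf)
# Prints roughly:
#   plugin {
#     quota = maildir:User quota
#     quota_rule = *:storage=1G
#   }
```
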
diff --git a/lc-gdn-chef/cookbooks/dovecot/libraries/plugins.rb b/lc-gdn-chef/cookbooks/dovecot/libraries/plugins.rb
deleted file mode 100644
index 34a3003a0e258b64e391e728cad7366bd7543b58..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/libraries/plugins.rb
+++ /dev/null
@@ -1,49 +0,0 @@
-# encoding: UTF-8
-#
-# Cookbook Name:: dovecot
-# Library:: plugins
-# Author:: Xabier de Zuazo ()
-# Copyright:: Copyright (c) 2013-2014 Onddo Labs, SL.
-# License:: Apache License, Version 2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-module DovecotCookbook
- # Helper module to check required plugins
- module Plugins
- def self.require_plugin?(plugin, conf)
- conf.key?('mail_plugins') &&
- conf['mail_plugins'].include?(plugin)
- end
-
- def self.required?(plugin, attrs)
- attrs = attrs.to_hash
- return true if require_plugin?(plugin, attrs['conf'])
- attrs['protocols'].sort.each do |_protocol, conf|
- return true if conf.is_a?(Hash) && require_plugin?(plugin, conf)
- end
- false
- end
-
- def self.list_unknown(plugins)
- known_plugins = %w(
- mail_log
- quota
- acl
- sieve
- )
- plugins.keys - known_plugins
- end
- end
-end
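
A small sketch of how `required?` scans both the global `mail_plugins` list and each protocol's own list (plain Ruby, snippet assumed to sit next to the deleted `plugins.rb`):

```ruby
# Hypothetical sketch; assumes this snippet sits next to the deleted plugins.rb.
require_relative 'plugins'

attrs = {
  'conf' => { 'mail_plugins' => %w(quota) },
  'protocols' => {
    'imap' => { 'mail_plugins' => %w(quota imap_quota) },
    'lmtp' => {}
  }
}

puts DovecotCookbook::Plugins.required?('quota', attrs) # => true
puts DovecotCookbook::Plugins.required?('sieve', attrs) # => false
p DovecotCookbook::Plugins.list_unknown({ 'sieve' => {}, 'foo' => {} })
# => ["foo"]
```
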
diff --git a/lc-gdn-chef/cookbooks/dovecot/libraries/protocols.rb b/lc-gdn-chef/cookbooks/dovecot/libraries/protocols.rb
deleted file mode 100644
index c275f0525e0b6da07e4ba87c419577ce3e13c3ce..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/libraries/protocols.rb
+++ /dev/null
@@ -1,35 +0,0 @@
-# encoding: UTF-8
-#
-# Cookbook Name:: dovecot
-# Library:: protocols
-# Author:: Xabier de Zuazo ()
-# Copyright:: Copyright (c) 2013-2014 Onddo Labs, SL.
-# License:: Apache License, Version 2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-module DovecotCookbook
- # Helper module to check enabled protocols
- module Protocols
- def self.enabled?(proto, protos)
- protos.key?(proto) && protos[proto].is_a?(Hash)
- end
-
- def self.list(protos)
- list = []
- protos.sort.each { |proto, conf| list.push(proto) if conf.is_a?(Hash) }
- list
- end
- end
-end
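
A brief sketch of the `Protocols` helpers: a protocol counts as enabled only when its attribute value is a Hash (snippet assumed to sit next to the deleted `protocols.rb`):

```ruby
# Hypothetical sketch; assumes this snippet sits next to the deleted protocols.rb.
require_relative 'protocols'

protos = {
  'imap' => {},
  'pop3' => false,
  'lmtp' => { 'mail_plugins' => %w(quota) }
}

puts DovecotCookbook::Protocols.enabled?('imap', protos) # => true
puts DovecotCookbook::Protocols.enabled?('pop3', protos) # => false
p DovecotCookbook::Protocols.list(protos)                # => ["imap", "lmtp"]
```
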
diff --git a/lc-gdn-chef/cookbooks/dovecot/libraries/pwfile.rb b/lc-gdn-chef/cookbooks/dovecot/libraries/pwfile.rb
deleted file mode 100644
index 0e47f8fdb634b1489a7614dda3846933bc5f4576..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/libraries/pwfile.rb
+++ /dev/null
@@ -1,163 +0,0 @@
-# encoding: UTF-8
-#
-# Cookbook Name:: dovecot
-# Library:: pwfile
-# Author:: Xabier de Zuazo ()
-# Copyright:: Copyright (c) 2013-2014 Onddo Labs, SL.
-# License:: Apache License, Version 2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-module DovecotCookbook
- # Helper module to check password file and import it
- module Pwfile
- extend Chef::Mixin::ShellOut
-
- # Checks if the file exists locally.
- #
- # @param localdata [String] File path.
- # @return [Boolean] Whether the file exists.
- def self.exists?(localdata)
- ::File.exist?(localdata)
- end
-
-    # Reads a password file from disk and returns its contents as a hash.
- #
- # @param inputfile [String] File path.
- # @return [Hash] The file contents.
- def self.file_to_hash(inputfile)
- output_entries = {}
- File.open(inputfile, File::RDONLY | File::CREAT, 0640) do |passwordfile|
- passwordfile.readlines.each do |line|
- user, data = fileline_to_userdb_hash(line)
- output_entries[user] = data
- end
- end
- output_entries
- end
-
-    # Returns the password file as a userdb-style hash and whether it exists.
- #
- # @param input [String] The path of the file.
- # @return [Array] An array with two values: The input in hash format and
- # whether the file exists on the disk or not.
- # @api public
- def self.passfile_read(input)
- [file_to_hash(input), exists?(input)]
- end
-
- # Returns a hash of details taken from the userdb file line.
- #
- # @param input [String] A line of input.
- def self.fileline_to_userdb_hash(input)
- data = [nil] * 7
- if input.strip.split(':').length == 2
- user, data[0] = input.strip.split(':')
- else
- user = input.strip.split(':')[0]
- data = input.strip.split(':')[1..7]
- end
- [user, data]
- end
-
- # Returns an array with 8 values to use with user copy.
- #
- # @param key [String] The name, or the first value to be included in the
- # final array.
- # @param value [String, Array] An array of values to be added to the key.
- # @return [Array] An array of length 8 with the format
- # `[key, value1, value2, ..., value7]`. The array is filled with `nil`
-    #   values if there are fewer than 7 values in the `value` array.
- def self.dbentry_to_array(key, value)
- if value.is_a?(Array)
- [key] + (value + ([nil] * (7 - value.size)))
- else
- [key, value] + ([nil] * 6)
- end
- end
-
- # Checks if a plain text password matches a specific encrypted password.
- #
- # @param hashed_pw [String] The password encrypted.
- # @param plaintext_pw [String] The password in clear text.
- # @return [Boolean] Whether the two passwords are the same.
- def self.password_valid?(hashed_pw, plaintext_pw)
- shell_out("/usr/bin/doveadm pw -t '#{hashed_pw}' -p '#{plaintext_pw}'")
- .exitstatus == 0
- end
-
- # Checks if two arrays contain the same values.
- #
- # @param array1 [Array] The first array.
- # @param array2 [Array] The second array.
- # @return [Boolean] Returns `true` if both arrays contain the same values.
- # @api public
- def self.arrays_same?(array1, array2)
- (array1 - array2).empty? && (array2 - array1).empty?
- end
-
- # Encrypts a plain text password.
- #
- # @param plaintextpass [String] The password to encrypt.
- # @return [String] The password encrypted.
- def self.encrypt_password(plaintextpass)
- shell_out("/usr/bin/doveadm pw -s MD5 -p '#{plaintextpass}'")
- .stdout.tr("\n", '')
- end
-
-    # Generates the user's password for the on-disk password file only if required.
- #
- # @param input_creds [Array] Credentials on disk for a user.
- # @param plaintextpass [String] The password to encrypt.
-    # @param updated [Boolean] Previous value of whether any user has been
- # updated.
- # @param file_exists [Boolean] Whether the local file already exists.
- # @return [Array] An array with two values: the encrypted password and
- # whether it needs to be updated on disk.
- def self.generate_userpass(input_creds, plaintextpass, updated, file_exists)
- if !input_creds.nil? && file_exists == true &&
- password_valid?(input_creds[0], plaintextpass)
- return [input_creds[0], updated]
- end
- [encrypt_password(plaintextpass), true]
- end
-
-    # Updates user credentials only if required.
- #
- # The `credentials` parameter is updated with all the credentials.
- #
- # @param databag_users [Hash] User list read from the Data Bag.
- # @param current_users [Hash] User list read from a file on the disk.
-    # @param pwfile_exists [Boolean] Whether the file already exists on disk.
-    # @param prev_updated [Boolean] Previous value of this function's return
-    #   value.
- # @param credentials [Array] The list of credentials. This value is
- # populated by this function with the generated (encrypted) credentials
- # ready to be written to disk.
- # @return [Boolean] `true` if any user has been updated.
- # @api public
- def self.compile_users(
- databag_users, current_users, pwfile_exists, prev_updated, credentials
- )
- databag_users.reduce(prev_updated) do |updated, (username, user_details)|
- current_user = dbentry_to_array(username, user_details)
- current_user[1], updated =
- generate_userpass(
- current_users[username], current_user[1], updated, pwfile_exists)
- credentials.push(current_user)
- updated
- end
- end
- end
-end
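
`Pwfile` mixes in `Chef::Mixin::ShellOut`, so it only loads inside a Chef run. As a standalone illustration of the userdb-style line format it parses, here is a simplified re-statement of the `fileline_to_userdb_hash` logic (not the cookbook's code):

```ruby
# Standalone illustration (simplified, not the cookbook's code) of the
# userdb-style passwd-file line format handled by Pwfile:
#   user:password:uid:gid:gecos:home:shell:extra
def parse_passwd_line(line)
  fields = line.strip.split(':')
  if fields.length == 2
    # "user:password" shorthand: keep the password, pad the rest with nils
    [fields[0], [fields[1]] + [nil] * 6]
  else
    [fields[0], fields[1..7]]
  end
end

user, data = parse_passwd_line('bob:{PLAIN}secret:1001:1001::/home/bob')
p user # => "bob"
p data # => ["{PLAIN}secret", "1001", "1001", "", "/home/bob"]
```
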
diff --git a/lc-gdn-chef/cookbooks/dovecot/metadata.rb b/lc-gdn-chef/cookbooks/dovecot/metadata.rb
deleted file mode 100644
index b4361d5284493a3a1a3c1b1129dd62c4ae94c87b..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/metadata.rb
+++ /dev/null
@@ -1,1884 +0,0 @@
-# encoding: UTF-8
-#
-# Cookbook Name:: dovecot
-# Author:: Xabier de Zuazo ()
-# Copyright:: Copyright (c) 2016 Xabier de Zuazo
-# Copyright:: Copyright (c) 2013-2015 Onddo Labs, SL.
-# License:: Apache License, Version 2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-name 'dovecot'
-maintainer 'Xabier de Zuazo'
-maintainer_email 'xabier@zuazo.org'
-license 'Apache 2.0'
-description <<-EOH
-Installs and configures Dovecot, open source IMAP and POP3 email server.
-EOH
-long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
-version '3.2.1'
-
-if respond_to?(:source_url)
- source_url "https://github.com/zuazo/#{name}-cookbook"
-end
-if respond_to?(:issues_url)
- issues_url "https://github.com/zuazo/#{name}-cookbook/issues"
-end
-
-chef_version '>= 12' if respond_to?(:chef_version)
-
-supports 'amazon'
-supports 'centos', '>= 6.0'
-supports 'debian', '>= 7.0'
-supports 'fedora', '>= 18.0'
-supports 'opensuse'
-supports 'oracle', '>= 6.0'
-supports 'scientific', '>= 6.0'
-supports 'suse'
-supports 'ubuntu', '>= 12.04'
-
-depends 'ohai', '~> 4.0'
-
-recipe 'dovecot::default', 'Installs and configures Dovecot.'
-recipe 'dovecot::user', 'Creates the dovecot system user.'
-recipe 'dovecot::conf_files', 'Generates all the configuration files.'
-recipe 'dovecot::ohai_plugin',
- 'Provides an Ohai plugin for reading dovecot install information.'
-recipe 'dovecot::from_package', 'Installs the required packages.'
-recipe 'dovecot::service', 'Configures the Dovecot service.'
-recipe 'dovecot::create_pwfile', 'Creates a userdb password file from databag.'
-
-attribute 'dovecot/install_from',
- display_name: 'dovecot install method',
- description:
-            'Determines how Dovecot is installed. Only `package` is '\
- 'supported for now.',
- type: 'string',
- required: 'optional',
- default: '"package"'
-
-attribute 'dovecot/user',
- display_name: 'dovecot user',
-          description: 'Dovecot system user. Should not be changed.',
- type: 'string',
- required: 'optional',
- default: '"dovecot"'
-
-attribute 'dovecot/group',
- display_name: 'dovecot group',
-          description: 'Dovecot system group. Should not be changed.',
- type: 'string',
- required: 'optional',
- default: '"dovecot"'
-
-attribute 'dovecot/databag_name',
- display_name: 'Databag name',
- description: 'The name of the databag to use',
- type: 'string',
- required: 'optional',
- default: '"dovecot"'
-
-attribute 'dovecot/databag_users_item',
- display_name: 'Databag users item name',
-          description: 'The name of the item to put the users in',
- type: 'string',
- required: 'optional',
- default: '"users"'
-
-attribute 'dovecot/user_homedir',
- display_name: 'dovecot homedir',
- description: 'Dovecot system user home directory.',
- calculated: true,
- type: 'string',
- required: 'optional'
-
-attribute 'dovecot/lib_path',
- display_name: 'dovecot library path',
-          description: 'Dovecot library path. Should not be changed.',
- calculated: true,
- type: 'string',
- required: 'optional'
-
-attribute 'dovecot/conf_path',
- display_name: 'dovecot configuration path',
- description:
-            'Dovecot configuration files path. Should not be changed.',
- type: 'string',
- required: 'optional',
- default: '"/etc/dovecot"'
-
-attribute 'dovecot/conf_files_user',
- display_name: 'dovecot configuration files user',
- description: 'System user owner of configuration files.',
- type: 'string',
- required: 'optional',
- default: '"root"'
-
-attribute 'dovecot/conf_files_group',
- display_name: 'dovecot configuration files group',
- description: 'System group owner of configuration files.',
- type: 'string',
- required: 'optional',
- default: 'node["dovecot"]["group"]'
-
-attribute 'dovecot/conf_files_mode',
- display_name: 'dovecot configuration files mode',
- description: 'Configuration files system file mode bits.',
- type: 'string',
- required: 'optional',
- default: '00644'
-
-attribute 'dovecot/conf/password_file',
- display_name: 'path of password file',
- description: 'The path and filename of the password file',
- type: 'string',
- required: 'optional',
- default: 'node["dovecot"]["conf_path"]/password'
-
-attribute 'dovecot/sensitive_files',
-          display_name: 'dovecot sensitive files',
- description:
- 'An array of dovecot sensitive configuration files. Each array '\
-            'item can be a glob expression or a fixed file name. These file '\
- 'names should be relative to node["dovecot"]["conf_path"] '\
- 'directory. Example: [ "dovecot-sql.conf.ext", "*-auth.conf.ext", '\
- '"conf.d/auth-supersecret.conf.ext" ]',
- type: 'array',
- required: 'optional',
- default: %w(*.conf.ext)
-
-attribute 'dovecot/sensitive_files_mode',
-          display_name: 'dovecot sensitive files mode',
-          description:
-            'Configuration files system file mode bits for sensitive files.',
- type: 'string',
- required: 'optional',
- default: '00640'
-
-attribute 'dovecot/conf_files/core',
- display_name: 'dovecot core configuration files',
- description: 'Dovecot core configuration files list.',
- type: 'array',
- required: 'optional',
- default: %w(
- conf.d/10-auth.conf
- conf.d/10-director.conf
- conf.d/10-logging.conf
- conf.d/10-mail.conf
- conf.d/10-master.conf
- conf.d/10-ssl.conf
- conf.d/10-tcpwrapper.conf
- conf.d/15-lda.conf
- conf.d/15-mailboxes.conf
- conf.d/90-acl.conf
- conf.d/90-plugin.conf
- conf.d/90-quota.conf
- conf.d/auth-checkpassword.conf.ext
- conf.d/auth-deny.conf.ext
- conf.d/auth-master.conf.ext
- conf.d/auth-passwdfile.conf.ext
- conf.d/auth-sql.conf.ext
- conf.d/auth-static.conf.ext
- conf.d/auth-system.conf.ext
- conf.d/auth-vpopmail.conf.ext
- dovecot.conf
- dovecot-db.conf.ext
- dovecot-dict-sql.conf.ext
- dovecot-sql.conf.ext
- )
-
-attribute 'dovecot/conf_files/imap',
- display_name: 'dovecot imap configuration files',
- description: 'Dovecot IMAP configuration files list.',
- type: 'array',
- required: 'optional',
- default: %w(conf.d/20-imap.conf)
-
-attribute 'dovecot/conf_files/pop3',
- display_name: 'dovecot pop3 configuration files',
- description: 'Dovecot POP3 configuration files list.',
- type: 'array',
- required: 'optional',
- default: %w(conf.d/20-pop3.conf)
-
-attribute 'dovecot/conf_files/lmtp',
- display_name: 'dovecot lmtp configuration files',
- description: 'Dovecot LMTP configuration files list.',
- type: 'array',
- required: 'optional',
- default: %w(conf.d/20-lmtp.conf)
-
-attribute 'dovecot/conf_files/sieve',
- display_name: 'dovecot sieve configuration files',
- description: 'Dovecot Sieve configuration files list.',
- type: 'array',
- required: 'optional',
- default: %w(
- conf.d/20-managesieve.conf
- conf.d/90-sieve.conf
- )
-
-attribute 'dovecot/conf_files/ldap',
- display_name: 'dovecot ldap configuration files',
- description: 'Dovecot LDAP configuration files list.',
- type: 'array',
- required: 'optional',
- default: %w(
- dovecot-ldap.conf.ext
- conf.d/auth-ldap.conf.ext
- )
-
-attribute 'dovecot/auth',
- display_name: 'dovecot auth',
- description:
- 'Dovecot Authentication Databases as a hash of hashes. Supported '\
- 'authdbs: checkpassword, deny, ldap, master, passwdfile, sql, '\
- 'system and vpopmail.',
- type: 'hash',
- required: 'optional',
- default: {}
-
-attribute 'dovecot/namespaces',
- display_name: 'dovecot namespaces',
- description: 'Dovecot Namespaces as an array of hashes.',
- type: 'array',
- required: 'optional',
- default: []
-
-attribute 'dovecot/plugins',
- display_name: 'dovecot plugins',
- description:
- 'Dovecot Plugins configuration as a hash of hashes. Supported '\
- 'plugins: mail_log, acl and quota.',
- type: 'hash',
- required: 'optional',
- default: {
- 'sieve' => {
- 'sieve' => '~/.dovecot.sieve',
- 'sieve_dir' => '~/sieve'
- }
- }
-
-attribute 'dovecot/protocols',
- display_name: 'dovecot protocols',
- description:
- 'Dovecot Protocols configuration as a hash of hashes. Supported '\
- 'protocols: lda, imap, lmtp, sieve and pop3.',
- type: 'hash',
- required: 'optional',
- default: {}
-
-attribute 'dovecot/services',
- display_name: 'dovecot services',
- description:
- 'Dovecot Services configuration as a hash of hashes. Supported '\
- 'services: anvil, director, imap-login, pop3-login, lmtp, imap, '\
- 'pop3, auth, auth-worker, dict, tcpwrap, managesieve-login, '\
- 'managesieve, quota-status, quota-warning, doveadm, config, '\
- 'aggregator, replicator',
- type: 'hash',
- required: 'optional',
- default: {}
-
-grouping 'dovecot/conf',
- title: 'dovecot conf',
- description: 'Dovecot configuration values'
-
-attribute 'dovecot/conf/mail_plugins',
- display_name: 'dovecot mail plugins',
- description: 'Dovecot default enabled mail_plugins.',
- type: 'array',
- required: 'optional',
- default: []
-
-grouping 'dovecot/packages',
- title: 'dovecot packages',
- description: 'Dovecot packages'
-
-attribute 'dovecot/packages/core',
- display_name: 'dovecot core packages',
- description: 'Dovecot core package names array.',
- type: 'array',
- required: 'optional',
- calculated: true
-
-attribute 'dovecot/packages/imap',
- display_name: 'dovecot imap packages',
- description: 'Dovecot IMAP package names array.',
- type: 'array',
- required: 'optional',
- calculated: true
-
-attribute 'dovecot/packages/pop3',
- display_name: 'dovecot pop3 packages',
- description: 'Dovecot POP3 package names array.',
- type: 'array',
- required: 'optional',
- calculated: true
-
-attribute 'dovecot/packages/lmtp',
- display_name: 'dovecot lmtp packages',
- description: 'Dovecot LMTP package names array.',
- type: 'array',
- required: 'optional',
- calculated: true
-
-attribute 'dovecot/packages/sieve',
- display_name: 'dovecot sieve packages',
- description: 'Dovecot Sieve package names array.',
- type: 'array',
- required: 'optional',
- calculated: true
-
-attribute 'dovecot/packages/ldap',
- display_name: 'dovecot ldap packages',
- description: 'Dovecot LDAP package names array.',
- type: 'array',
- required: 'optional',
- calculated: true
-
-attribute 'dovecot/packages/sqlite',
- display_name: 'dovecot sqlite packages',
- description: 'Dovecot SQLite package names array.',
- type: 'array',
- required: 'optional',
- calculated: true
-
-attribute 'dovecot/packages/mysql',
- display_name: 'dovecot mysql packages',
- description: 'Dovecot MySQL package names array.',
- type: 'array',
- required: 'optional',
- calculated: true
-
-attribute 'dovecot/packages/pgsql',
- display_name: 'dovecot pgsql packages',
- description: 'Dovecot PostgreSQL package names array.',
- type: 'array',
- required: 'optional',
- calculated: true
-
-grouping 'dovecot/service',
- title: 'dovecot service',
- description: 'Dovecot system service'
-
-attribute 'dovecot/service/name',
- display_name: 'dovecot service name',
- description: 'Dovecot system service name.',
- type: 'string',
- required: 'optional',
- default: 'dovecot'
-
-attribute 'dovecot/service/supports',
- display_name: 'dovecot service supports',
- description: 'Dovecot service supported actions.',
- type: 'hash',
- required: 'optional',
- calculated: true
-
-attribute 'dovecot/service/provider',
- display_name: 'dovecot service provider',
- description: 'Dovecot service Chef provider class.',
- type: 'string', # 'class' really
- required: 'optional',
- calculated: true
-
-attribute 'dovecot/ohai_plugin/build-options',
- display_name: 'dovecot ohai plugin build options',
- description:
- 'Whether to enable reading build options inside ohai plugin. Can '\
-            'be disabled to make the plugin lighter.',
- type: 'string',
- required: 'optional',
- choice: %w(true false),
- default: 'true'
-
-# dovecot.conf
-
-attribute 'dovecot/conf/listen',
- display_name: 'listen',
- description:
- 'A comma separated list of IPs or hosts where to listen in for '\
- 'connections.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/base_dir',
- display_name: 'dovecot base dir',
- description: 'Base directory where to store runtime data.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/instance_name',
- display_name: 'instance name',
- description:
- 'Name of this instance. Used to prefix all Dovecot processes in '\
- 'ps output.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/login_greeting',
- display_name: 'login greeting',
- description: 'Greeting message for clients.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/login_trusted_networks',
- display_name: 'login trusted networks',
- description: 'Space separated list of trusted network ranges.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/login_access_sockets',
- display_name: 'login access sockets',
- description: 'Space separated list of login access check sockets.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/auth_proxy_self',
- display_name: 'auth proxy self',
- description:
- 'With proxy_maybe=yes if proxy destination matches any of these '\
- 'IPs, don\'t do proxying.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/verbose_proctitle',
- display_name: 'verbose proctitle',
- description: 'Show more verbose process titles (in ps).',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/shutdown_clients',
- display_name: 'shutdown clients',
- description:
- 'Should all processes be killed when Dovecot master process shuts '\
- 'down.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/doveadm_port',
- display_name: 'doveadm port',
- description:
- 'If non-zero, doveadm cli will use this port to communicate with '\
- 'doveadm server.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/doveadm_password',
- display_name: 'doveadm password',
- description:
- 'If not empty, the doveadm server replication communication will '\
- 'use that secret.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/doveadm_worker_count',
- display_name: 'doveadm worker count',
- description:
- 'If non-zero, run mail commands via this many connections to '\
- 'doveadm server.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/doveadm_socket_path',
- display_name: 'doveadm socket path',
- description:
- 'UNIX socket or host:port used for connecting to doveadm server.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/import_environment',
- display_name: 'import environment',
- description:
- 'Space separated list of environment variables that are preserved '\
-    'on Dovecot startup and passed to its child processes.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/dict',
- display_name: 'dict',
- description: 'Dictionary server settings as a hash.',
- type: 'hash',
- required: 'optional',
- default: nil
-
-# conf.d/10-auth.conf
-
-attribute 'dovecot/conf/disable_plaintext_auth',
- display_name: 'disable plaintext auth',
- description:
- 'Disable LOGIN command and all other plaintext authentications '\
- 'unless SSL/TLS is used.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/auth_cache_size',
- display_name: 'auth cache size',
- description:
- 'Authentication cache size (e.g. 10M). 0 means it\'s disabled.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/auth_cache_ttl',
- display_name: 'auth cache ttl',
- description: 'Time to live for cached data.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/auth_cache_negative_ttl',
- display_name: 'auth cache negative ttl',
- description:
- 'TTL for negative hits (user not found, password mismatch).',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/auth_realms',
-  display_name: 'auth realms',
- description:
- 'Space separated list (or array) of realms for SASL '\
- 'authentication mechanisms that need them.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/auth_default_realm',
- display_name: 'auth default realm',
- description: 'Default realm/domain to use if none was specified.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/auth_username_chars',
- display_name: 'auth username chars',
- description: 'List of allowed characters in username.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/auth_username_translation',
- display_name: 'auth username translation',
- description:
- 'Username character translations before it\'s looked up from '\
- 'databases.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/auth_username_format',
- display_name: 'auth username format',
- description:
- 'Username formatting before it\'s looked up from databases.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/auth_master_user_separator',
- display_name: 'auth master user separator',
- description:
- 'If you want to allow master users to log in by specifying the '\
- 'master username within the normal username string, you can '\
- 'specify the separator character here (format: '\
-    '<username><separator><master username>).',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/auth_anonymous_username',
- display_name: 'auth anonymous username',
- description:
- 'Username to use for users logging in with ANONYMOUS SASL '\
- 'mechanism.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/auth_worker_max_count',
- display_name: 'auth worker max count',
- description: 'Maximum number of dovecot-auth worker processes.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/auth_gssapi_hostname',
- display_name: 'auth gssapi hostname',
- description: 'Host name to use in GSSAPI principal names.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/auth_krb5_keytab',
- display_name: 'auth krb5 keytab',
- description: 'Kerberos keytab to use for the GSSAPI mechanism.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/auth_use_winbind',
- display_name: 'auth use winbind',
- description:
- 'Do NTLM and GSS-SPNEGO authentication using Samba\'s winbind '\
- 'daemon and ntlm_auth helper.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/auth_winbind_helper_path',
- display_name: 'auth winbind helper path',
- description: 'Path for Samba\'s ntlm_auth helper binary.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/auth_failure_delay',
- display_name: 'auth failure delay',
- description:
- 'Time to delay before replying to failed authentications.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/auth_ssl_require_client_cert',
- display_name: 'auth ssl require client cert',
- description:
-    'Require a valid SSL client certificate or the authentication '\
-    'fails.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/auth_mechanisms',
- display_name: 'auth mechanisms',
- description:
- 'Space separated list of wanted authentication mechanisms: plain, '\
- 'login, digest-md5, cram-md5, ntlm, rpa, apop, anonymous, gssapi, '\
- 'otp, skey, gss-spnego',
- type: 'string',
- required: 'optional',
- default: '"plain"'
-
-# conf.d/10-director.conf
-
-attribute 'dovecot/conf/director_servers',
- display_name: 'director servers',
- description:
- 'List of IPs or hostnames to all director servers, including '\
- 'ourself (as a string or as an array).',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/director_mail_servers',
- display_name: 'director mail servers',
- description: 'List of IPs or hostnames to all backend mail servers.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/director_user_expire',
-  display_name: 'director user expire',
- description:
- 'How long to redirect users to a specific server after it no '\
- 'longer has any connections.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/director_doveadm_port',
- display_name: 'director doveadm port',
- description:
- 'TCP/IP port that accepts doveadm connections (instead of '\
- 'director connections).',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/director_username_hash',
- display_name: 'director username hash',
- description:
- 'How the username is translated before being hashed.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-# conf.d/10-logging.conf
-
-attribute 'dovecot/conf/log_path',
-  display_name: 'log path',
- description:
- 'Log file to use for error messages. "syslog" logs to syslog, '\
- '/dev/stderr logs to stderr.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/info_log_path',
- display_name: 'info log path',
- description:
- 'Log file to use for informational messages. Defaults to log_path.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/debug_log_path',
- display_name: 'debug log path',
- description:
- 'Log file to use for debug messages. Defaults to info_log_path.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/syslog_facility',
- display_name: 'syslog facility',
- description: 'Syslog facility to use if you\'re logging to syslog.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/auth_verbose',
- display_name: 'auth verbose',
- description:
- 'Log unsuccessful authentication attempts and the reasons why '\
- 'they failed.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/auth_verbose_passwords',
- display_name: 'auth verbose passwords',
- description:
- 'In case of password mismatches, log the attempted password.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/auth_debug',
- display_name: 'auth debug',
- description: 'Even more verbose logging for debugging purposes.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/auth_debug_passwords',
- display_name: 'auth debug passwords',
- description:
- 'In case of password mismatches, log the passwords and used '\
- 'scheme so the problem can be debugged.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/mail_debug',
- display_name: 'mail debug',
- description: 'Enable mail process debugging.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/verbose_ssl',
- display_name: 'verbose ssl',
- description: 'Show protocol level SSL errors.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/log_timestamp',
- display_name: 'log timestamp',
- description: 'Prefix for each line written to log file.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/login_log_format_elements',
- display_name: 'login log format elements',
- description:
- 'Space-separated list (or array) of elements we want to log.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/login_log_format',
- display_name: 'login log format',
- description: 'Login log format.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/mail_log_prefix',
- display_name: 'mail log prefix',
- description: 'Log prefix for mail processes.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/deliver_log_format',
- display_name: 'deliver log format',
- description: 'Format to use for logging mail deliveries.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-# conf.d/10-mail.conf
-
-attribute 'dovecot/conf/mail_location',
- display_name: 'mail location',
- description: 'Location for user\'s mailboxes.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/mail_shared_explicit_inbox',
- display_name: 'mail shared explicit inbox',
- description:
- 'Should shared INBOX be visible as "shared/user" or '\
- '"shared/user/INBOX"?',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/mail_uid',
- display_name: 'mail uid',
- description: 'System user used to access mails.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/mail_gid',
- display_name: 'mail gid',
- description: 'System group used to access mails.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/mail_privileged_group',
- display_name: 'mail privileged group',
- description: 'Group to enable temporarily for privileged operations.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/mail_access_groups',
- display_name: 'mail access groups',
- description:
- 'Grant access to these supplementary groups for mail processes.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/mail_full_filesystem_access',
- display_name: 'mail full filesystem access',
- description: 'Allow full filesystem access to clients.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/mail_attribute_dict',
- display_name: 'mail attribute dict',
- description: 'Dictionary for key=value mailbox attributes.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/mail_server_comment',
- display_name: 'mail server comment',
- description: 'A comment or note that is associated with the server.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/mail_server_admin',
- display_name: 'mail server admin',
- description:
- 'Indicates a method for contacting the server administrator. '\
- 'This value MUST be a URI.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/mmap_disable',
- display_name: 'mmap disable',
- description: 'Don\'t use mmap() at all.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/dotlock_use_excl',
- display_name: 'dotlock use excl',
- description: 'Rely on O_EXCL to work when creating dotlock files.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/mail_fsync',
- display_name: 'mail fsync',
- description:
- 'When to use fsync() or fdatasync() calls: optimized, always or '\
- 'never',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/mail_nfs_storage',
- display_name: 'mail nfs storage',
- description: 'Mail storage exists in NFS.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/mail_nfs_index',
- display_name: 'mail nfs index',
- description: 'Mail index files also exist in NFS.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/lock_method',
- display_name: 'lock method',
- description:
- 'Locking method for index files: fcntl, flock or dotlock.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/mail_temp_dir',
- display_name: 'mail temp dir',
- description:
- 'Directory in which LDA/LMTP temporarily stores incoming mails '\
- '>128 kB.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/first_valid_uid',
- display_name: 'first valid uid',
- description: 'Valid UID range for users, defaults to 500 and above.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/last_valid_uid',
- display_name: 'last valid uid',
- description: 'Valid UID range for users, defaults to 500 and above.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/first_valid_gid',
- display_name: 'first valid gid',
- description: 'Valid GID range for users, defaults to non-root/wheel.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/last_valid_gid',
- display_name: 'last valid gid',
- description: 'Valid GID range for users, defaults to non-root/wheel.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/mail_max_keyword_length',
- display_name: 'mail max keyword length',
- description: 'Maximum allowed length for mail keyword name.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/valid_chroot_dirs',
- display_name: 'valid chroot dirs',
- description:
- '\':\' separated list of directories under which chrooting is '\
- 'allowed for mail processes.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/mail_chroot',
- display_name: 'mail chroot',
- description: 'Default chroot directory for mail processes.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/auth_socket_path',
- display_name: 'auth socket path',
- description:
- 'UNIX socket path to master authentication server to find users.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/mail_plugin_dir',
- display_name: 'mail plugin dir',
- description: 'Directory where to look up mail plugins.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/mail_cache_min_mail_count',
- display_name: 'mail cache min mail count',
- description:
- 'The minimum number of mails in a mailbox before updates are done '\
- 'to cache file.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/mailbox_idle_check_interval',
- display_name: 'mailbox idle check interval',
- description:
- 'When IDLE command is running, mailbox is checked once in a while '\
- 'to see if there are any new mails or other changes.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/mail_save_crlf',
- display_name: 'mail save crlf',
- description: 'Save mails with CR+LF instead of plain LF.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/mail_prefetch_count',
- display_name: 'mail prefetch count',
- description:
- 'Max number of mails to keep open and prefetch to memory.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/mail_temp_scan_interval',
- display_name: 'mail temp scan interval',
- description:
- 'How often to scan for stale temporary files and delete them '\
- '(0 = never).',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/maildir_stat_dirs',
- display_name: 'maildir stat dirs',
- description:
- 'By default LIST command returns all entries in maildir beginning '\
- 'with a dot.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/maildir_copy_with_hardlinks',
- display_name: 'maildir copy with hardlinks',
- description:
- 'When copying a message, do it with hard links whenever possible.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/maildir_very_dirty_syncs',
- display_name: 'maildir very dirty syncs',
- description: 'Assume Dovecot is the only MUA accessing Maildir.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/maildir_broken_filename_sizes',
- display_name: 'maildir broken filename sizes',
- description:
- 'If enabled, Dovecot doesn\'t use the S= in the Maildir '\
- 'filenames for getting the mail\'s physical size, except when '\
- 'recalculating Maildir++ quota.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/maildir_empty_new',
- display_name: 'maildir empty new',
- description:
- 'Always move mails from new/ directory to cur/, even when the '\
- '\Recent flags aren\'t being reset.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/mbox_read_locks',
- display_name: 'mbox read locks',
- description:
- 'Which read locking methods to use for locking mbox: dotlock, '\
-    'dotlock_try, fcntl, flock or lockf.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/mbox_write_locks',
- display_name: 'mbox write locks',
- description:
- 'Which write locking methods to use for locking mbox: dotlock, '\
-    'dotlock_try, fcntl, flock or lockf.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/mbox_lock_timeout',
- display_name: 'mbox lock timeout',
- description:
- 'Maximum time to wait for lock (all of them) before aborting.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/mbox_dotlock_change_timeout',
- display_name: 'mbox dotlock change timeout',
- description:
- 'If dotlock exists but the mailbox isn\'t modified in any way, '\
- 'override the lock file after this much time.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/mbox_dirty_syncs',
- display_name: 'mbox dirty syncs',
- description:
-    'When mbox changes unexpectedly, simply read the new mails, but '\
-    'still safely fall back to re-reading the whole mbox file '\
- 'whenever something in mbox isn\'t how it\'s expected to be.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/mbox_very_dirty_syncs',
- display_name: 'mbox very dirty syncs',
- description:
- 'Like mbox_dirty_syncs, but don\'t do full syncs even with '\
- 'SELECT, EXAMINE, EXPUNGE or CHECK commands.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/mbox_lazy_writes',
- display_name: 'mbox lazy writes',
- description:
- 'Delay writing mbox headers until doing a full write sync '\
- '(EXPUNGE and CHECK commands and when closing the mailbox).',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/mbox_min_index_size',
- display_name: 'mbox min index size',
- description:
- 'If mbox size is smaller than this (e.g. 100k), don\'t write '\
- 'index files.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/mbox_md5',
- display_name: 'mbox md5',
- description:
- 'Mail header selection algorithm to use for MD5 POP3 UIDLs when '\
- 'pop3_uidl_format=%m.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/mdbox_rotate_size',
- display_name: 'mdbox rotate size',
- description: 'Maximum dbox file size until it\'s rotated.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/mdbox_rotate_interval',
- display_name: 'mdbox rotate interval',
- description: 'Maximum dbox file age until it\'s rotated.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/mdbox_preallocate_space',
- display_name: 'mdbox preallocate space',
- description:
- 'When creating new mdbox files, immediately preallocate their '\
- 'size to mdbox_rotate_size.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/mail_attachment_dir',
- display_name: 'mail attachment dir',
- description:
- 'Directory root where to store mail attachments. Disabled, if '\
- 'empty.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/mail_attachment_min_size',
- display_name: 'mail attachment min size',
- description:
- 'Attachments smaller than this aren\'t saved externally.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/mail_attachment_fs',
- display_name: 'mail attachment fs',
- description:
- 'Filesystem backend to use for saving attachments: posix, sis '\
- 'posix or sis-queue posix.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/mail_attachment_hash',
- display_name: 'mail attachment hash',
- description: 'Hash format to use in attachment filenames.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-# conf.d/10-master.conf
-
-attribute 'dovecot/conf/default_process_limit',
- display_name: 'default process limit',
- description: 'Default process limit.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/default_client_limit',
- display_name: 'default client limit',
- description: 'Default client limit.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/default_vsz_limit',
- display_name: 'default vsz limit',
- description:
- 'Default VSZ (virtual memory size) limit for service processes.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/default_login_user',
- display_name: 'default login user',
- description: 'Login user is internally used by login processes.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/default_internal_user',
- display_name: 'default internal user',
- description: 'Internal user is used by unprivileged processes.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-# conf.d/10-ssl.conf
-
-attribute 'dovecot/conf/ssl',
- display_name: 'ssl',
- description: 'SSL/TLS support: true or false',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/ssl_cert',
- display_name: 'ssl cert',
- description: 'PEM encoded X.509 SSL/TLS certificate.',
- calculated: true,
- type: 'string',
- required: 'optional'
-
-attribute 'dovecot/conf/ssl_key',
- display_name: 'ssl key',
- description: 'PEM encoded X.509 SSL/TLS private key.',
- calculated: true,
- type: 'string',
- required: 'optional'
-
-attribute 'dovecot/conf/ssl_key_password',
- display_name: 'ssl key password',
- description:
- 'If key file is password protected, give the password here.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/ssl_ca',
- display_name: 'ssl ca',
- description: 'PEM encoded trusted certificate authority.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/ssl_require_crl',
- display_name: 'ssl require crl',
- description:
- 'Require that CRL check succeeds for client certificates.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/ssl_client_ca_dir',
- display_name: 'ssl client ca dir',
- description:
- 'Directory for trusted SSL CA certificates. These are used only '\
-    'when Dovecot needs to act as an SSL client.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/ssl_client_ca_file',
- display_name: 'ssl client ca file',
- description:
- 'File for trusted SSL CA certificates. These are used only when '\
- 'Dovecot needs to act as an SSL client.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/ssl_verify_client_cert',
- display_name: 'ssl verify client cert',
- description: 'Request client to send a certificate.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/ssl_cert_username_field',
- display_name: 'ssl cert username field',
- description: 'Which field from certificate to use for username.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/ssl_parameters_regenerate',
- display_name: 'ssl parameters regenerate',
- description: 'How often to regenerate the SSL parameters file.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/ssl_dh_parameters_length',
- display_name: 'ssl dh parameters length',
- description: 'DH parameters length to use.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/ssl_protocols',
- display_name: 'ssl protocols',
- description: 'SSL protocols to use.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/ssl_cipher_list',
- display_name: 'ssl cipher list',
- description: 'SSL ciphers to use',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/ssl_prefer_server_ciphers',
- display_name: 'ssl prefer server ciphers',
- description: 'Prefer the server\'s order of ciphers over client\'s.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/ssl_options',
- display_name: 'ssl options',
- description:
- 'SSL extra options. Currently supported options are: '\
- 'no_compression',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/ssl_crypto_device',
- display_name: 'ssl crypto device',
- description:
- 'SSL crypto device to use, for valid values run '\
- '"$ openssl engine".',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-# conf.d/15-lda.conf
-
-attribute 'dovecot/conf/postmaster_address',
- display_name: 'postmaster address',
- description: 'Address to use when sending rejection mails.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/hostname',
- display_name: 'hostname',
- description:
- 'Hostname to use in various parts of sent mails, eg. in '\
- 'Message-Id.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/quota_full_tempfail',
- display_name: 'quota full tempfail',
- description:
- 'If user is over quota, return with temporary failure instead of '\
- 'bouncing the mail.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/sendmail_path',
- display_name: 'sendmail path',
- description: 'Binary to use for sending mails.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/submission_host',
- display_name: 'submission host',
- description:
- 'If non-empty, send mails via this SMTP host[:port] instead of '\
- 'sendmail.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/rejection_subject',
- display_name: 'rejection subject',
- description: 'Subject: header to use for rejection mails.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/rejection_reason',
- display_name: 'rejection reason',
- description: 'Human readable error message for rejection mails.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/recipient_delimiter',
- display_name: 'recipient delimiter',
- description:
- 'Delimiter character between local-part and detail in email '\
- 'address.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/lda_original_recipient_header',
- display_name: 'lda original recipient header',
- description:
- 'Header where the original recipient address (SMTP\'s RCPT TO: '\
- 'address) is taken from if not available elsewhere.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/lda_mailbox_autocreate',
- display_name: 'lda mailbox autocreate',
- description:
- 'Should saving a mail to a nonexistent mailbox automatically '\
- 'create it?',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/lda_mailbox_autosubscribe',
- display_name: 'lda mailbox autosubscribe',
- description:
- 'Should automatically created mailboxes be also automatically '\
- 'subscribed?',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-# conf.d/20-lmtp.conf
-
-attribute 'dovecot/conf/lmtp_proxy',
- display_name: 'lmtp proxy',
- description:
- 'Support proxying to other LMTP/SMTP servers by performing passdb '\
- 'lookups.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/lmtp_save_to_detail_mailbox',
- display_name: 'lmtp save to detail mailbox',
- description:
- 'When recipient address includes the detail (e.g. user+detail), '\
- 'try to save the mail to the detail mailbox.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/lmtp_rcpt_check_quota',
- display_name: 'lmtp rcpt check quota',
- description:
- 'Verify quota before replying to RCPT TO. This adds a small '\
- 'overhead.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/lmtp_hdr_delivery_address',
- display_name: 'lmtp hdr delivery address',
- description:
- 'Which recipient address to use for Delivered-To: header and '\
- 'Received: header.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-# dovecot-db.conf.ext
-
-attribute 'dovecot/conf/db',
- display_name: 'berkeley db',
- description: 'DB_CONFIG for Berkeley DB as a hash.',
- type: 'hash',
- required: 'optional',
- default: nil
-
-# conf-dovecot-dict-sql.rb
-
-grouping 'dovecot/conf/dict_sql',
- title: 'dict sql config',
- description: 'Dovecot dict sql configuration'
-
-attribute 'dovecot/conf/dict_sql/connect',
- display_name: 'dict sql connect',
- description:
- 'Dict sql connect configuration as a string or an array.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/dict_sql/maps',
- display_name: 'dict sql maps',
- description: 'Dict sql database tables maps.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-# dovecot-ldap.conf.ext
-
-grouping 'dovecot/conf/ldap',
- title: 'ldap config',
- description: 'Dovecot LDAP Authentication Database configuration'
-
-attribute 'dovecot/conf/ldap/hosts',
- display_name: 'ldap hosts',
- description: 'Space separated list or array of LDAP hosts to use.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/ldap/uris',
- display_name: 'ldap uris',
- description: 'LDAP URIs to use.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/ldap/dn',
- display_name: 'ldap dn',
- description:
- 'Distinguished Name, the username used to login to the LDAP '\
- 'server.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/ldap/dnpass',
- display_name: 'ldap dnpass',
- description: 'Password for LDAP server, if dn is specified.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/ldap/sasl_bind',
- display_name: 'ldap sasl bind',
- description: 'Use SASL binding instead of the simple binding.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/ldap/sasl_mech',
- display_name: 'ldap sasl mech',
- description: 'SASL mechanism name to use.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/ldap/sasl_realm',
- display_name: 'ldap sasl realm',
- description: 'SASL realm to use.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/ldap/sasl_authz_id',
- display_name: 'ldap sasl authz id',
- description:
- 'SASL authorization ID, ie. the dnpass is for this "master user", '\
- 'but the dn is still the logged in user.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/ldap/tls',
- display_name: 'ldap tls',
- description: 'Use TLS to connect to the LDAP server.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/ldap/tls_ca_cert_file',
- display_name: 'ldap tls ca cert file',
- description: 'TLS options, currently supported only with OpenLDAP.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/ldap/tls_ca_cert_dir',
- display_name: 'ldap tls ca cert dir',
- description: 'TLS options, currently supported only with OpenLDAP.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/ldap/tls_cipher_suite',
- display_name: 'ldap tls cipher suite',
- description: 'TLS options, currently supported only with OpenLDAP.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/ldap/tls_cert_file',
- display_name: 'ldap tls cert file',
- description:
- 'TLS cert/key is used only if LDAP server requires a client '\
- 'certificate.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/ldap/tls_key_file',
- display_name: 'ldap tls key file',
- description:
- 'TLS cert/key is used only if LDAP server requires a client '\
- 'certificate.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/ldap/tls_require_cert',
- display_name: 'ldap tls require cert',
- description: 'Valid values: never, hard, demand, allow, try',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/ldap/ldaprc_path',
- display_name: 'ldap ldaprc path',
- description: 'Use the given ldaprc path.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/ldap/debug_level',
- display_name: 'ldap debug level',
- description:
- 'LDAP library debug level as specified by LDAP_DEBUG_* in '\
- 'ldap_log.h.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/ldap/auth_bind',
- display_name: 'ldap auth bind',
- description:
- 'Use authentication binding for verifying password\'s validity.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/ldap/auth_bind_userdn',
- display_name: 'ldap auth bind userdn',
- description:
- 'If authentication binding is used, you can save one LDAP request '\
- 'per login if user\'s DN can be specified with a common template.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/ldap/ldap_version',
- display_name: 'ldap ldap version',
- description: 'LDAP protocol version to use. Likely 2 or 3.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/ldap/base',
- display_name: 'ldap base',
- description: 'LDAP base. %variables can be used here.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/ldap/deref',
- display_name: 'ldap deref',
- description: 'Dereference: never, searching, finding or always.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/ldap/scope',
- display_name: 'ldap scope',
- description: 'Search scope: base, onelevel or subtree.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/ldap/user_attrs',
- display_name: 'ldap user attrs',
- description:
- 'User attributes are given in LDAP-name=dovecot-internal-name '\
- 'list.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/ldap/user_filter',
- display_name: 'ldap user filter',
- description: 'Filter for user lookup.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/ldap/pass_attrs',
- display_name: 'ldap pass attrs',
- description: 'Password checking attributes.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/ldap/pass_filter',
- display_name: 'ldap pass filter',
- description: 'Filter for password lookups.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/ldap/iterate_attrs',
- display_name: 'ldap iterate attrs',
- description: 'Attributes to get a list of all users',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/ldap/iterate_filter',
- display_name: 'ldap iterate filter',
- description: 'Filter to get a list of all users',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/ldap/default_pass_scheme',
- display_name: 'ldap default pass scheme',
- description:
- 'Default password scheme. "{scheme}" before password overrides '\
- 'this.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-# dovecot-sql.conf.ext
-
-attribute 'dovecot/conf/sql/driver',
- display_name: 'sql driver',
- description: 'Database driver: mysql, pgsql or sqlite.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/sql/connect',
- display_name: 'sql connect',
- description:
- 'Database connection string or array. This is driver-specific '\
- 'setting.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/sql/default_pass_scheme',
- display_name: 'sql default pass scheme',
- description: 'Default password scheme.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/sql/password_query',
- display_name: 'sql password query',
- description: 'passdb query to retrieve the password.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/sql/user_query',
- display_name: 'sql user query',
- description: 'userdb query to retrieve the user information.',
- type: 'string',
- required: 'optional',
- default: 'nil'
-
-attribute 'dovecot/conf/sql/iterate_query',
- display_name: 'sql iterate query',
- description: 'Query to get a list of all usernames.',
- type: 'string',
- required: 'optional',
- default: 'nil'
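
Note: the metadata above only declares the `dovecot/conf/*` attributes; concrete values are supplied on the node, typically from a wrapper cookbook, role or environment. A minimal sketch of such overrides follows; the attribute names come from the declarations above, while every value is illustrative only.

```ruby
# Hypothetical wrapper-cookbook attributes file; values are examples only.
default['dovecot']['conf']['listen'] = '*'
default['dovecot']['conf']['login_greeting'] = 'Mail server ready.'
default['dovecot']['conf']['disable_plaintext_auth'] = 'yes'
default['dovecot']['conf']['auth_mechanisms'] = 'plain login'
default['dovecot']['conf']['mail_location'] = 'maildir:~/Maildir'
default['dovecot']['conf']['postmaster_address'] = 'postmaster@example.com'
default['dovecot']['conf']['ssl'] = 'yes'
```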
diff --git a/lc-gdn-chef/cookbooks/dovecot/recipes/conf_files.rb b/lc-gdn-chef/cookbooks/dovecot/recipes/conf_files.rb
deleted file mode 100644
index ed8a1f2a28147dd0bd4fd274fc46a62beea311eb..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/recipes/conf_files.rb
+++ /dev/null
@@ -1,84 +0,0 @@
-# encoding: UTF-8
-#
-# Cookbook Name:: dovecot
-# Recipe:: conf_files
-# Author:: Xabier de Zuazo ()
-# Copyright:: Copyright (c) 2013-2014 Onddo Labs, SL.
-# License:: Apache License, Version 2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# required directories
-
-directory node['dovecot']['lib_path'] do
- owner node['dovecot']['conf_files_user']
- group node['dovecot']['conf_files_group']
- mode '00755'
-end
-conf_files_dirs =
- node['dovecot']['conf_files'].values.reduce([]) do |r, conf_files|
- r + conf_files.map { |f| ::File.dirname(f) }
- end.uniq
-conf_files_dirs.each do |dir|
- directory "#{node['dovecot']['conf_path']}/#{dir}" do
- name ::File.join(node['dovecot']['conf_path'], dir)
- recursive true
- owner node['dovecot']['conf_files_user']
- group node['dovecot']['group']
- mode '00755'
- only_if { dir != '.' }
- end
-end
-
-# config files
-
-node['dovecot']['conf_files'].each do |type, conf_files|
- conf_files.each do |conf_file|
- template "(#{type}) #{conf_file}" do
-      # Helper that picks the file mode: sensitive files use a stricter mode.
-      def get_conf_file_mode(conf_file)
- node['dovecot']['sensitive_files'].each do |file_glob|
- if ::File.fnmatch?(file_glob, conf_file, ::File::FNM_PATHNAME)
- return node['dovecot']['sensitive_files_mode']
- end
- end
- node['dovecot']['conf_files_mode']
- end
-
- path ::File.join(node['dovecot']['conf_path'], conf_file)
- source "#{conf_file}.erb"
- owner node['dovecot']['conf_files_user']
- group node['dovecot']['conf_files_group']
- mode get_conf_file_mode(conf_file)
- variables(
- auth: node['dovecot']['auth'].to_hash,
- protocols: node['dovecot']['protocols'].to_hash,
- services: node['dovecot']['services'].to_hash,
- plugins: node['dovecot']['plugins'].to_hash,
- namespaces: node['dovecot']['namespaces'],
- conf: node['dovecot']['conf']
- )
- if DovecotCookbook::Conf.require?(type, node['dovecot'])
- action :create
- else
- action :delete
- end
- notifies :reload, 'service[dovecot]'
- end # template conf_file
- end # conf_files.each
-end # node['dovecot']['conf_files'].each
-
-# Already included in ::default recipe, required for ChefSpec tests
-include_recipe 'dovecot::service'
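
The closing comment mentions ChefSpec; a minimal spec sketch for this recipe could look like the block below. The platform, version and matcher targets are assumptions, not taken from the cookbook's actual test suite.

```ruby
# spec/unit/recipes/conf_files_spec.rb (hypothetical)
require 'chefspec'

describe 'dovecot::conf_files' do
  let(:chef_run) do
    # Platform and version are arbitrary choices for this example.
    ChefSpec::SoloRunner.new(platform: 'ubuntu', version: '20.04')
                        .converge(described_recipe)
  end

  it 'creates the Dovecot lib directory' do
    expect(chef_run).to create_directory(chef_run.node['dovecot']['lib_path'])
  end

  it 'includes the service recipe' do
    expect(chef_run).to include_recipe('dovecot::service')
  end
end
```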
diff --git a/lc-gdn-chef/cookbooks/dovecot/recipes/create_pwfile.rb b/lc-gdn-chef/cookbooks/dovecot/recipes/create_pwfile.rb
deleted file mode 100644
index 89d12c9c5f3f0807a0b3b3db9750bbd7fe89e376..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/recipes/create_pwfile.rb
+++ /dev/null
@@ -1,65 +0,0 @@
-#
-# Cookbook Name:: dovecot
-# Recipe:: create_pwfile
-# Author:: Xabier de Zuazo ()
-# Copyright:: Copyright (c) 2013-2014 Onddo Labs, SL.
-# License:: Apache License, Version 2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# The file credentials should be like:
-# user:password:uid:gid:(gecos):home:(shell):extra_fields
-# We ignore gecos and shell; the other fields are written to the template, but
-# uid, gid, home and extra_fields may be left empty:
-# user:pass::::::
-
-# Predefined Variables
-credentials = []
-update_credentials = false
-db_name = node['dovecot']['databag_name']
-db_item = node['dovecot']['databag_users_item']
-pwfile = node['dovecot']['conf']['password_file']
-
-ruby_block 'databag_to_dovecot_userdb' do
- block do
- databag_users = data_bag_item(db_name, db_item)['users']
-
- # Check if passwd file exists:
- local_creds, pwfile_exists = DovecotCookbook::Pwfile.passfile_read(pwfile)
-    # Check whether the users in the passwd file and in the data bag match.
-    # If they differ, force a credentials update:
- unless DovecotCookbook::Pwfile
- .arrays_same?(databag_users.keys, local_creds.keys)
- update_credentials = true
- end
-    # Check whether any user's password has changed; if so, update it and force an update:
- update_credentials =
- DovecotCookbook::Pwfile.compile_users(
- databag_users, local_creds, pwfile_exists, update_credentials,
- credentials
- )
- end
- action :run
-end
-
-template node['dovecot']['conf']['password_file'] do
- source 'password.erb'
- owner node['dovecot']['user']
- group node['dovecot']['group']
- mode '0640'
- sensitive true
- variables(
- credentials: credentials
- )
- only_if { update_credentials }
-end
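
The recipe reads `data_bag_item(node['dovecot']['databag_name'], node['dovecot']['databag_users_item'])['users']`. A hypothetical item shape is sketched below; only the top-level `users` key and the usernames-as-keys layout are visible in the recipe itself, so the per-user fields are assumptions (`DovecotCookbook::Pwfile.compile_users` is not part of this diff).

```ruby
# Hypothetical data bag item, expressed as the Ruby hash data_bag_item would
# return; the field names inside each user entry are assumptions.
{
  'id' => 'users',
  'users' => {
    'alice@example.com' => { 'password' => '{SHA512-CRYPT}$6$examplehash' },
    'bob@example.com'   => { 'password' => '{PLAIN}changeme' }
  }
}
```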
diff --git a/lc-gdn-chef/cookbooks/dovecot/recipes/default.rb b/lc-gdn-chef/cookbooks/dovecot/recipes/default.rb
deleted file mode 100644
index f655bcb5737dc16b40af9464756141ada5025260..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/recipes/default.rb
+++ /dev/null
@@ -1,26 +0,0 @@
-# encoding: UTF-8
-#
-# Cookbook Name:: dovecot
-# Recipe:: default
-# Author:: Xabier de Zuazo ()
-# Copyright:: Copyright (c) 2013-2014 Onddo Labs, SL.
-# License:: Apache License, Version 2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-include_recipe 'dovecot::ohai_plugin'
-include_recipe 'dovecot::user'
-include_recipe "dovecot::from_#{node['dovecot']['install_from']}"
-include_recipe 'dovecot::conf_files'
-include_recipe 'dovecot::service'
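
The `install_from` attribute selects which `dovecot::from_*` recipe the default recipe includes; `from_package` is the only backend present in this diff. An illustrative role that wires this up (names and values are examples only):

```ruby
# Hypothetical Chef role (Ruby DSL); 'package' matches the from_package recipe.
name 'mailstore'
run_list 'recipe[dovecot]'
default_attributes(
  'dovecot' => { 'install_from' => 'package' }
)
```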
diff --git a/lc-gdn-chef/cookbooks/dovecot/recipes/from_package.rb b/lc-gdn-chef/cookbooks/dovecot/recipes/from_package.rb
deleted file mode 100644
index c30c8c6aefd56fa286c08311eefa188ac8685d5c..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/recipes/from_package.rb
+++ /dev/null
@@ -1,42 +0,0 @@
-# encoding: UTF-8
-#
-# Cookbook Name:: dovecot
-# Recipe:: from_package
-# Author:: Xabier de Zuazo ()
-# Copyright:: Copyright (c) 2013-2014 Onddo Labs, SL.
-# License:: Apache License, Version 2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# Already included in ::default recipe, required for ChefSpec tests
-include_recipe 'dovecot::ohai_plugin'
-
-node['dovecot']['packages'].each do |type, pkgs|
- if pkgs.is_a?(String)
- pkgs = [pkgs]
- elsif !pkgs.is_a?(Array)
- raise "`node['dovecot']['packages']['#{type}']` should contain an array "\
- "of packages. You passed: #{pkgs.inspect}"
- end
-
- pkgs.each do |pkg|
- package "(#{type}) #{pkg}" do
- package_name pkg
- only_if { DovecotCookbook::Conf.require?(type, node['dovecot']) }
- if type == 'core' || node['dovecot']['ohai_plugin']['build-options']
- notifies :reload, 'ohai[dovecot]', :immediately
- end
- end # package
- end # pkg.each
-end # node['dovecot']['packages'].each
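
The loop above accepts either a single String or an Array per package type and raises for anything else. Illustrative attribute values follow; the package names are distro-dependent and shown only as examples.

```ruby
default['dovecot']['packages']['core']  = 'dovecot-core'     # single String
default['dovecot']['packages']['sieve'] = %w(dovecot-sieve)  # Array
default['dovecot']['packages']['ldap']  = ['dovecot-ldap']   # Array literal
```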
diff --git a/lc-gdn-chef/cookbooks/dovecot/recipes/ohai_plugin.rb b/lc-gdn-chef/cookbooks/dovecot/recipes/ohai_plugin.rb
deleted file mode 100644
index f7f3ea1c3b400d5c0ccde54798ed524cf4f6e84f..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/recipes/ohai_plugin.rb
+++ /dev/null
@@ -1,39 +0,0 @@
-# encoding: UTF-8
-#
-# Cookbook Name:: dovecot
-# Recipe:: ohai_plugin
-# Author:: Xabier de Zuazo ()
-# Copyright:: Copyright (c) 2013-2014 Onddo Labs, SL.
-# License:: Apache License, Version 2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-def ohai7?
- Gem::Requirement.new('>= 7').satisfied_by?(Gem::Version.new(Ohai::VERSION))
-end
-
-ohai_build_options = node['dovecot']['ohai_plugin']['build-options']
-source_dir = ohai7? ? 'ohai7_plugins' : 'ohai_plugins'
-
-# dummy resource to be able to notify reload action to the ohai plugin
-ohai 'dovecot' do
- action :nothing
-end
-
-ohai_plugin 'dovecot' do
- name 'dovecot'
- source_file "#{source_dir}/dovecot.rb.erb"
- resource :template
- variables enable_build_options: ohai_build_options
-end
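
Per the metadata above, `dovecot/ohai_plugin/build-options` is a string choice of 'true'/'false'; disabling it keeps the plugin lighter. An illustrative override:

```ruby
# Note the value is the *string* 'false', matching the metadata choices.
default['dovecot']['ohai_plugin']['build-options'] = 'false'
```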
diff --git a/lc-gdn-chef/cookbooks/dovecot/recipes/service.rb b/lc-gdn-chef/cookbooks/dovecot/recipes/service.rb
deleted file mode 100644
index 81b6331c00142abdb90dd12165e8b974eca23013..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/recipes/service.rb
+++ /dev/null
@@ -1,28 +0,0 @@
-# encoding: UTF-8
-#
-# Cookbook Name:: dovecot
-# Recipe:: service
-# Author:: Xabier de Zuazo ()
-# Copyright:: Copyright (c) 2015 Xabier de Zuazo
-# Copyright:: Copyright (c) 2013-2014 Onddo Labs, SL.
-# License:: Apache License, Version 2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-service 'dovecot' do
- service_name node['dovecot']['service']['name']
- supports Mash.new(node['dovecot']['service']['supports'])
- provider node['dovecot']['service']['provider']
- action [:enable, :start]
-end
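
The service resource takes its name, supports hash and provider from node attributes that the metadata marks as calculated. An illustrative override, with values that are examples rather than the cookbook's real defaults:

```ruby
default['dovecot']['service']['name'] = 'dovecot'
default['dovecot']['service']['supports'] =
  { 'status' => true, 'restart' => true, 'reload' => true }
```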
diff --git a/lc-gdn-chef/cookbooks/dovecot/recipes/user.rb b/lc-gdn-chef/cookbooks/dovecot/recipes/user.rb
deleted file mode 100644
index 4131005f0258805e8cc32a06b8c6a8280c52b7ca..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/recipes/user.rb
+++ /dev/null
@@ -1,43 +0,0 @@
-# encoding: UTF-8
-#
-# Cookbook Name:: dovecot
-# Recipe:: user
-# Author:: Xabier de Zuazo ()
-# Copyright:: Copyright (c) 2013-2014 Onddo Labs, SL.
-# License:: Apache License, Version 2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-user node['dovecot']['user'] do
- comment 'Dovecot mail server'
- home node['dovecot']['user_homedir']
- shell '/bin/false'
- system true
-end
-
-group node['dovecot']['group'] do
- members [node['dovecot']['user']]
- system true
- append true
-end
-
-default_login_user = node['dovecot']['conf']['default_login_user'] || 'dovenull'
-
-group default_login_user do
- system true
-end
-
-user default_login_user do
- group default_login_user
-end
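
The unprivileged login user falls back to 'dovenull' when `node['dovecot']['conf']['default_login_user']` is unset. Illustrative overrides; the cookbook's real defaults for user and group are not part of this hunk:

```ruby
default['dovecot']['conf']['default_login_user'] = 'dovenull'
default['dovecot']['user']  = 'dovecot'  # hypothetical value
default['dovecot']['group'] = 'dovecot'  # hypothetical value
```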
diff --git a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/10-auth.conf.erb b/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/10-auth.conf.erb
deleted file mode 100644
index c5422509c7c7ce5c857a7759335974a58e85444b..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/10-auth.conf.erb
+++ /dev/null
@@ -1,131 +0,0 @@
-# Generated by Chef
-
-##
-## Authentication processes
-##
-
-# Disable LOGIN command and all other plaintext authentications unless
-# SSL/TLS is used (LOGINDISABLED capability). Note that if the remote IP
-# matches the local IP (ie. you're connecting from the same computer), the
-# connection is considered secure and plaintext authentication is allowed.
-# See also ssl=required setting.
-<%= DovecotCookbook::Conf.attribute(@conf, 'disable_plaintext_auth', true) %>
-
-# Authentication cache size (e.g. 10M). 0 means it's disabled. Note that
-# bsdauth, PAM and vpopmail require cache_key to be set for caching to be used.
-<%= DovecotCookbook::Conf.attribute(@conf, 'auth_cache_size', 0) %>
-# Time to live for cached data. After TTL expires the cached record is no
-# longer used, *except* if the main database lookup returns internal failure.
-# We also try to handle password changes automatically: If user's previous
-# authentication was successful, but this one wasn't, the cache isn't used.
-# For now this works only with plaintext authentication.
-<%= DovecotCookbook::Conf.attribute(@conf, 'auth_cache_ttl', '1 hour') %>
-# TTL for negative hits (user not found, password mismatch).
-# 0 disables caching them completely.
-<%= DovecotCookbook::Conf.attribute(@conf, 'auth_cache_negative_ttl', '1 hour') %>
-
-# Space separated list of realms for SASL authentication mechanisms that need
-# them. You can leave it empty if you don't want to support multiple realms.
-# Many clients simply use the first one listed here, so keep the default realm
-# first.
-<%= DovecotCookbook::Conf.attribute(@conf, 'auth_realms') %>
-
-# Default realm/domain to use if none was specified. This is used for both
-# SASL realms and appending @domain to username in plaintext logins.
-<%= DovecotCookbook::Conf.attribute(@conf, 'auth_default_realm') %>
-
-# List of allowed characters in username. If the user-given username contains
-# a character not listed in here, the login automatically fails. This is just
-# an extra check to make sure user can't exploit any potential quote escaping
-# vulnerabilities with SQL/LDAP databases. If you want to allow all characters,
-# set this value to empty.
-<%= DovecotCookbook::Conf.attribute(@conf, 'auth_username_chars', 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234567890.-_@') %>
-
-# Username character translations before it's looked up from databases. The
-# value contains series of from -> to characters. For example "#@/@" means
-# that '#' and '/' characters are translated to '@'.
-<%= DovecotCookbook::Conf.attribute(@conf, 'auth_username_translation') %>
-
-# Username formatting before it's looked up from databases. You can use
-# the standard variables here, eg. %Lu would lowercase the username, %n would
-# drop away the domain if it was given, or "%n-AT-%d" would change the '@' into
-# "-AT-". This translation is done after auth_username_translation changes.
-<%= DovecotCookbook::Conf.attribute(@conf, 'auth_username_format', '%Lu') %>
-
-# If you want to allow master users to log in by specifying the master
-# username within the normal username string (ie. not using SASL mechanism's
-# support for it), you can specify the separator character here. The format
-# is then <username><separator><master username>. UW-IMAP uses "*" as the
-# separator, so that could be a good choice.
-<%= DovecotCookbook::Conf.attribute(@conf, 'auth_master_user_separator') %>
-
-# Username to use for users logging in with ANONYMOUS SASL mechanism
-<%= DovecotCookbook::Conf.attribute(@conf, 'auth_anonymous_username', 'anonymous') %>
-
-# Maximum number of dovecot-auth worker processes. They're used to execute
-# blocking passdb and userdb queries (eg. MySQL and PAM). They're
-# automatically created and destroyed as needed.
-<%= DovecotCookbook::Conf.attribute(@conf, 'auth_worker_max_count', 30) %>
-
-# Host name to use in GSSAPI principal names. The default is to use the
-# name returned by gethostname(). Use "$ALL" (with quotes) to allow all keytab
-# entries.
-<%= DovecotCookbook::Conf.attribute(@conf, 'auth_gssapi_hostname') %>
-
-# Kerberos keytab to use for the GSSAPI mechanism. Will use the system
-# default (usually /etc/krb5.keytab) if not specified. You may need to change
-# the auth service to run as root to be able to read this file.
-<%= DovecotCookbook::Conf.attribute(@conf, 'auth_krb5_keytab') %>
-
-# Do NTLM and GSS-SPNEGO authentication using Samba's winbind daemon and
-# ntlm_auth helper.
-<%= DovecotCookbook::Conf.attribute(@conf, 'auth_use_winbind', false) %>
-
-# Path for Samba's ntlm_auth helper binary.
-<%= DovecotCookbook::Conf.attribute(@conf, 'auth_winbind_helper_path', '/usr/bin/ntlm_auth') %>
-
-# Time to delay before replying to failed authentications.
-<%= DovecotCookbook::Conf.attribute(@conf, 'auth_failure_delay', '2 secs') %>
-
-# Require a valid SSL client certificate or the authentication fails.
-<%= DovecotCookbook::Conf.attribute(@conf, 'auth_ssl_require_client_cert', false) %>
-
-# Take the username from client's SSL certificate, using
-# X509_NAME_get_text_by_NID() which returns the subject's DN's
-# CommonName.
-<%= DovecotCookbook::Conf.attribute(@conf, 'auth_ssl_username_from_cert', false) %>
-
-# Space separated list of wanted authentication mechanisms:
-# plain login digest-md5 cram-md5 ntlm rpa apop anonymous gssapi otp skey
-# gss-spnego
-# NOTE: See also disable_plaintext_auth setting.
-<%= DovecotCookbook::Conf.attribute(@conf, 'auth_mechanisms', 'plain') %>
-
-##
-## Password and user databases
-##
-
-#
-# Password database is used to verify user's password (and nothing more).
-# You can have multiple passdbs and userdbs. This is useful if you want to
-# allow both system users (/etc/passwd) and virtual users to login without
-# duplicating the system users into virtual database.
-#
-#
-#
-# User database specifies where mails are located and what user/group IDs
-# own them. For single-UID configuration use "static" userdb.
-#
-#
-
-<%= '#' unless @auth['deny'].kind_of?(Hash) %>!include auth-deny.conf.ext
-<%= '#' unless @auth['master'].kind_of?(Hash) %>!include auth-master.conf.ext
-
-<%= '#' unless @auth['system'].kind_of?(Hash) %>!include auth-system.conf.ext
-<%= '#' unless @auth['sql'].kind_of?(Hash) %>!include auth-sql.conf.ext
-<%= '#' unless DovecotCookbook::Conf::Require.ldap?(@conf) %>!include auth-ldap.conf.ext
-<%= '#' unless @auth['passwdfile'].kind_of?(Hash) %>!include auth-passwdfile.conf.ext
-<%= '#' unless @auth['checkpassword'].kind_of?(Hash) %>!include auth-checkpassword.conf.ext
-<%= '#' unless @auth['vpopmail'].kind_of?(Hash) %>!include auth-vpopmail.conf.ext
-<%= '#' unless @auth['static'].kind_of?(Hash) %>!include auth-static.conf.ext
-<%= '#' unless @auth['dict'].kind_of?(Hash) %>!include auth-dict.conf.ext
diff --git a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/10-director.conf.erb b/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/10-director.conf.erb
deleted file mode 100644
index 342eb9090331d0c291584eb394897d2ad4c75c08..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/10-director.conf.erb
+++ /dev/null
@@ -1,67 +0,0 @@
-# Generated by Chef
-
-##
-## Director-specific settings.
-##
-
-# Director can be used by Dovecot proxy to keep a temporary user -> mail server
-# mapping. As long as user has simultaneous connections, the user is always
-# redirected to the same server. Each proxy server is running its own director
-# process, and the directors are communicating the state to each others.
-# Directors are mainly useful with NFS-like setups.
-
-# List of IPs or hostnames to all director servers, including ourself.
-# Ports can be specified as ip:port. The default port is the same as
-# what director service's inet_listener is using.
-<%= DovecotCookbook::Conf.attribute(@conf, 'director_servers') %>
-
-# List of IPs or hostnames to all backend mail servers. Ranges are allowed
-# too, like 10.0.0.10-10.0.0.30.
-<%= DovecotCookbook::Conf.attribute(@conf, 'director_mail_servers') %>
-
-# How long to redirect users to a specific server after it no longer has
-# any connections.
-<%= DovecotCookbook::Conf.attribute(@conf, 'director_user_expire', '15 min') %>
-
-# TCP/IP port that accepts doveadm connections (instead of director connections)
-# If you enable this, you'll also need to add inet_listener for the port.
-<%= DovecotCookbook::Conf.attribute(@conf, 'director_doveadm_port', 0) %>
-
-# How the username is translated before being hashed. Useful values include
-# %Ln if user can log in with or without @domain, %Ld if mailboxes are shared
-# within domain.
-<%= DovecotCookbook::Conf.attribute(@conf, 'director_username_hash', '%Lu') %>
-
-<% if @services['director'].kind_of?(Hash) and @services['director'].length > 0 -%>
-<%= DovecotCookbook::Conf.service('director', @services['director']) %>
-<% else -%>
-# To enable director service, uncomment the modes and assign a port.
-service director {
- unix_listener login/director {
- #mode = 0666
- }
- fifo_listener login/proxy-notify {
- #mode = 0666
- }
- unix_listener director-userdb {
- #mode = 0600
- }
- inet_listener {
- #port =
- }
-}
-<% end -%>
-
-# Enable director for the wanted login services by telling them to
-# connect to director socket instead of the default login socket:
-service imap-login {
- #executable = imap-login director
-}
-service pop3-login {
- #executable = pop3-login director
-}
-
-# Enable director for LMTP proxying:
-protocol lmtp {
- #auth_socket_path = director-userdb
-}
diff --git a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/10-logging.conf.erb b/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/10-logging.conf.erb
deleted file mode 100644
index cbcdb2e7ebffd8b68508654dcc76461341f8e997..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/10-logging.conf.erb
+++ /dev/null
@@ -1,91 +0,0 @@
-# Generated by Chef
-
-##
-## Log destination.
-##
-
-# Log file to use for error messages. "syslog" logs to syslog,
-# /dev/stderr logs to stderr.
-<%= DovecotCookbook::Conf.attribute(@conf, 'log_path', 'syslog') %>
-
-# Log file to use for informational messages. Defaults to log_path.
-<%= DovecotCookbook::Conf.attribute(@conf, 'info_log_path') %>
-# Log file to use for debug messages. Defaults to info_log_path.
-<%= DovecotCookbook::Conf.attribute(@conf, 'debug_log_path') %>
-
-# Syslog facility to use if you're logging to syslog. Usually if you don't
-# want to use "mail", you'll use local0..local7. Also other standard
-# facilities are supported.
-<%= DovecotCookbook::Conf.attribute(@conf, 'syslog_facility', 'mail') %>
-
-##
-## Logging verbosity and debugging.
-##
-
-# Log unsuccessful authentication attempts and the reasons why they failed.
-<%= DovecotCookbook::Conf.attribute(@conf, 'auth_verbose', false) %>
-
-# In case of password mismatches, log the attempted password. Valid values are
-# no, plain and sha1. sha1 can be useful for detecting brute force password
-# attempts vs. user simply trying the same password over and over again.
-# You can also truncate the value to n chars by appending ":n" (e.g. sha1:6).
-<%= DovecotCookbook::Conf.attribute(@conf, 'auth_verbose_passwords', false) %>
-
-# Even more verbose logging for debugging purposes. Shows for example SQL
-# queries.
-<%= DovecotCookbook::Conf.attribute(@conf, 'auth_debug', false) %>
-
-# In case of password mismatches, log the passwords and used scheme so the
-# problem can be debugged. Enabling this also enables auth_debug.
-<%= DovecotCookbook::Conf.attribute(@conf, 'auth_debug_passwords', false) %>
-
-# Enable mail process debugging. This can help you figure out why Dovecot
-# isn't finding your mails.
-<%= DovecotCookbook::Conf.attribute(@conf, 'mail_debug', false) %>
-
-# Show protocol level SSL errors.
-<%= DovecotCookbook::Conf.attribute(@conf, 'verbose_ssl', false) %>
-
-# mail_log plugin provides more event logging for mail processes.
-<% if @plugins.has_key?('mail_log') and @plugins['mail_log'].kind_of?(Hash) -%>
-<%= DovecotCookbook::Conf.plugin('mail_log', @plugins['mail_log']) %>
-<% else -%>
-plugin {
- # Events to log. Also available: flag_change append
- #mail_log_events = delete undelete expunge copy mailbox_delete mailbox_rename
- # Available fields: uid, box, msgid, from, subject, size, vsize, flags
- # size and vsize are available only for expunge and copy events.
- #mail_log_fields = uid box msgid size
-}
-<% end -%>
-
-##
-## Log formatting.
-##
-
-# Prefix for each line written to log file. % codes are in strftime(3)
-# format.
-<%= DovecotCookbook::Conf.attribute(@conf, 'log_timestamp', '"%b %d %H:%M:%S "') %>
-
-# Space-separated list of elements we want to log. The elements which have
-# a non-empty variable value are joined together to form a comma-separated
-# string.
-<%= DovecotCookbook::Conf.attribute(@conf, 'login_log_format_elements', 'user=<%u> method=%m rip=%r lip=%l mpid=%e %c') %>
-
-# Login log format. %s contains login_log_format_elements string, %$ contains
-# the data we want to log.
-<%= DovecotCookbook::Conf.attribute(@conf, 'login_log_format', '%$: %s') %>
-
-# Log prefix for mail processes. See doc/wiki/Variables.txt for list of
-# possible variables you can use.
-<%= DovecotCookbook::Conf.attribute(@conf, 'mail_log_prefix', '"%s(%u): "') %>
-
-# Format to use for logging mail deliveries. See doc/wiki/Variables.txt for
-# list of all variables you can use. Some of the common ones include:
-# %$ - Delivery status message (e.g. "saved to INBOX")
-# %m - Message-ID
-# %s - Subject
-# %f - From address
-# %p - Physical size
-# %w - Virtual size
-<%= DovecotCookbook::Conf.attribute(@conf, 'deliver_log_format', 'msgid=%m: %$') %>
diff --git a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/10-mail.conf.erb b/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/10-mail.conf.erb
deleted file mode 100644
index bfd924c64dfa5293e3dc615da89ed0db6e5f1c85..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/10-mail.conf.erb
+++ /dev/null
@@ -1,401 +0,0 @@
-# Generated by Chef
-
-##
-## Mailbox locations and namespaces
-##
-
-# Location for users' mailboxes. The default is empty, which means that Dovecot
-# tries to find the mailboxes automatically. This won't work if the user
-# doesn't yet have any mail, so you should explicitly tell Dovecot the full
-# location.
-#
-# If you're using mbox, giving a path to the INBOX file (eg. /var/mail/%u)
-# isn't enough. You'll also need to tell Dovecot where the other mailboxes are
-# kept. This is called the "root mail directory", and it must be the first
-# path given in the mail_location setting.
-#
-# There are a few special variables you can use, eg.:
-#
-# %u - username
-# %n - user part in user@domain, same as %u if there's no domain
-# %d - domain part in user@domain, empty if there's no domain
-# %h - home directory
-#
-# See doc/wiki/Variables.txt for full list. Some examples:
-#
-# mail_location = maildir:~/Maildir
-# mail_location = mbox:~/mail:INBOX=/var/mail/%u
-# mail_location = mbox:/var/mail/%d/%1n/%n:INDEX=/var/indexes/%d/%1n/%n
-#
-#
-#
-<%= DovecotCookbook::Conf.attribute(@conf, 'mail_location') %>
-
-# If you need to set multiple mailbox locations or want to change default
-# namespace settings, you can do it by defining namespace sections.
-#
-# You can have private, shared and public namespaces. Private namespaces
-# are for user's personal mails. Shared namespaces are for accessing other
-# users' mailboxes that have been shared. Public namespaces are for shared
-# mailboxes that are managed by sysadmin. If you create any shared or public
-# namespaces you'll typically want to enable ACL plugin also, otherwise all
-# users can access all the shared mailboxes, assuming they have permissions
-# on filesystem level to do so.
-<% if @namespaces.kind_of?(Array) and @namespaces.length > 0 -%>
-<% @namespaces.each do |ns| -%>
-<%= DovecotCookbook::Conf.namespace(ns) %>
-<% end -%>
-<% else -%>
-#namespace {
- # Namespace type: private, shared or public
- #type = private
-
- # Hierarchy separator to use. You should use the same separator for all
- # namespaces or some clients get confused. '/' is usually a good one.
- # The default however depends on the underlying mail storage format.
- #separator =
-
- # Prefix required to access this namespace. This needs to be different for
- # all namespaces. For example "Public/".
- #prefix =
-
- # Physical location of the mailbox. This is in same format as
- # mail_location, which is also the default for it.
- #location =
-
- # There can be only one INBOX, and this setting defines which namespace
- # has it.
- #inbox = yes
-
- # If namespace is hidden, it's not advertised to clients via NAMESPACE
- # extension. You'll most likely also want to set list=no. This is mostly
- # useful when converting from another server with different namespaces which
- # you want to deprecate but still keep working. For example you can create
- # hidden namespaces with prefixes "~/mail/", "~%u/mail/" and "mail/".
- #hidden = no
-
- # Show the mailboxes under this namespace with LIST command. This makes the
- # namespace visible for clients that don't support NAMESPACE extension.
- # "children" value lists child mailboxes, but hides the namespace prefix.
- #list = yes
-
- # Namespace handles its own subscriptions. If set to "no", the parent
- # namespace handles them (empty prefix should always have this as "yes")
- #subscriptions = yes
-
- # See 15-mailboxes.conf for definitions of special mailboxes.
-#}
-<% end -%>
-
-# Example shared namespace configuration
-#namespace {
- #type = shared
- #separator = /
-
- # Mailboxes are visible under "shared/user@domain/"
- # %%n, %%d and %%u are expanded to the destination user.
- #prefix = shared/%%u/
-
- # Mail location for other users' mailboxes. Note that %variables and ~/
- # expands to the logged in user's data. %%n, %%d, %%u and %%h expand to the
- # destination user's data.
- #location = maildir:%%h/Maildir:INDEX=~/Maildir/shared/%%u
-
- # Use the default namespace for saving subscriptions.
- #subscriptions = no
-
- # List the shared/ namespace only if there are visible shared mailboxes.
- #list = children
-#}
-# Should shared INBOX be visible as "shared/user" or "shared/user/INBOX"?
-<%= DovecotCookbook::Conf.attribute(@conf, 'mail_shared_explicit_inbox', false) %>
-
-# System user and group used to access mails. If you use multiple, userdb
-# can override these by returning uid or gid fields. You can use either numbers
-# or names.
-<%= DovecotCookbook::Conf.attribute(@conf, 'mail_uid') %>
-<%= DovecotCookbook::Conf.attribute(@conf, 'mail_gid') %>
-
-# Group to enable temporarily for privileged operations. Currently this is
-# used only with INBOX when either its initial creation or dotlocking fails.
-# Typically this is set to "mail" to give access to /var/mail.
-<%= DovecotCookbook::Conf.attribute(@conf, 'mail_privileged_group') %>
-
-# Grant access to these supplementary groups for mail processes. Typically
-# these are used to set up access to shared mailboxes. Note that it may be
-# dangerous to set these if users can create symlinks (e.g. if "mail" group is
-# set here, ln -s /var/mail ~/mail/var could allow a user to delete others'
-# mailboxes, or ln -s /secret/shared/box ~/mail/mybox would allow reading it).
-<%= DovecotCookbook::Conf.attribute(@conf, 'mail_access_groups') %>
-
-# Allow full filesystem access to clients. There's no access checks other than
-# what the operating system does for the active UID/GID. It works with both
-# maildir and mboxes, allowing you to prefix mailboxes names with eg. /path/
-# or ~user/.
-<%= DovecotCookbook::Conf.attribute(@conf, 'mail_full_filesystem_access', false) %>
-
-# Dictionary for key=value mailbox attributes. This is used for example by
-# URLAUTH and METADATA extensions.
-<%= DovecotCookbook::Conf.attribute(@conf, 'mail_attribute_dict') %>
-
-# A comment or note that is associated with the server. This value is
-# accessible for authenticated users through the IMAP METADATA server
-# entry "/shared/comment".
-<%= DovecotCookbook::Conf.attribute(@conf, 'mail_server_comment', '') %>
-
-# Indicates a method for contacting the server administrator. According to
-# RFC 5464, this value MUST be a URI (e.g., a mailto: or tel: URL), but that
-# is currently not enforced. Use for example mailto:admin@example.com. This
-# value is accessible for authenticated users through the IMAP METADATA server
-# entry "/shared/admin".
-<%= DovecotCookbook::Conf.attribute(@conf, 'mail_server_admin', '') %>
-
-##
-## Mail processes
-##
-
-# Don't use mmap() at all. This is required if you store indexes to shared
-# filesystems (NFS or clustered filesystem).
-<%= DovecotCookbook::Conf.attribute(@conf, 'mmap_disable', false) %>
-
-# Rely on O_EXCL to work when creating dotlock files. NFS supports O_EXCL
-# since version 3, so this should be safe to use nowadays by default.
-<%= DovecotCookbook::Conf.attribute(@conf, 'dotlock_use_excl', true) %>
-
-# When to use fsync() or fdatasync() calls:
-# optimized (default): Whenever necessary to avoid losing important data
-# always: Useful with e.g. NFS when write()s are delayed
-# never: Never use it (best performance, but crashes can lose data)
-<%= DovecotCookbook::Conf.attribute(@conf, 'mail_fsync', 'optimized') %>
-
-# Mail storage exists in NFS. Set this to yes to make Dovecot flush NFS caches
-# whenever needed. If you're using only a single mail server this isn't needed.
-<%= DovecotCookbook::Conf.attribute(@conf, 'mail_nfs_storage', false) %>
-# Mail index files also exist in NFS. Setting this to yes requires
-# mmap_disable=yes and fsync_disable=no.
-<%= DovecotCookbook::Conf.attribute(@conf, 'mail_nfs_index', false) %>
-
-# Locking method for index files. Alternatives are fcntl, flock and dotlock.
-# Dotlocking uses some tricks which may create more disk I/O than other locking
-# methods. NFS users: flock doesn't work, remember to change mmap_disable.
-<%= DovecotCookbook::Conf.attribute(@conf, 'lock_method', 'fcntl') %>
-
-# Directory in which LDA/LMTP temporarily stores incoming mails >128 kB.
-<%= DovecotCookbook::Conf.attribute(@conf, 'mail_temp_dir', '/tmp') %>
-
-# Valid UID range for users, defaults to 500 and above. This is mostly
-# to make sure that users can't log in as daemons or other system users.
-# Note that denying root logins is hardcoded to dovecot binary and can't
-# be done even if first_valid_uid is set to 0.
-<%= DovecotCookbook::Conf.attribute(@conf, 'first_valid_uid', 500) %>
-<%= DovecotCookbook::Conf.attribute(@conf, 'last_valid_uid', 0) %>
-
-# Valid GID range for users, defaults to non-root/wheel. Users having
-# non-valid GID as primary group ID aren't allowed to log in. If user
-# belongs to supplementary groups with non-valid GIDs, those groups are
-# not set.
-<%= DovecotCookbook::Conf.attribute(@conf, 'first_valid_gid', 1) %>
-<%= DovecotCookbook::Conf.attribute(@conf, 'last_valid_gid', 0) %>
-
-# Maximum allowed length for mail keyword name. It's only forced when trying
-# to create new keywords.
-<%= DovecotCookbook::Conf.attribute(@conf, 'mail_max_keyword_length', 50) %>
-
-# ':' separated list of directories under which chrooting is allowed for mail
-# processes (ie. /var/mail will allow chrooting to /var/mail/foo/bar too).
-# This setting doesn't affect login_chroot, mail_chroot or auth chroot
-# settings. If this setting is empty, "/./" in home dirs are ignored.
-# WARNING: Never add directories here which local users can modify, that
-# may lead to root exploit. Usually this should be done only if you don't
-# allow shell access for users.
-<%= DovecotCookbook::Conf.attribute(@conf, 'valid_chroot_dirs') %>
-
-# Default chroot directory for mail processes. This can be overridden for
-# specific users in user database by giving /./ in user's home directory
-# (eg. /home/./user chroots into /home). Note that usually there is no real
-# need to do chrooting, Dovecot doesn't allow users to access files outside
-# their mail directory anyway. If your home directories are prefixed with
-# the chroot directory, append "/." to mail_chroot.
-<%= DovecotCookbook::Conf.attribute(@conf, 'mail_chroot') %>
-
-# UNIX socket path to master authentication server to find users.
-# This is used by imap (for shared users) and lda.
-<%= DovecotCookbook::Conf.attribute(@conf, 'auth_socket_path', '/var/run/dovecot/auth-userdb') %>
-
-# Directory where to look up mail plugins.
-<%= case node['platform_family']
- when 'rhel', 'fedora', 'suse'
- DovecotCookbook::Conf.attribute(@conf, 'mail_plugin_dir', '/usr/lib/dovecot')
- # when 'debian'
- else
- DovecotCookbook::Conf.attribute(@conf, 'mail_plugin_dir', '/usr/lib/dovecot/modules')
- end
-%>
-
-# Space separated list of plugins to load for all services. Plugins specific to
-# IMAP, LDA, etc. are added to this list in their own .conf files.
-<%= DovecotCookbook::Conf.attribute(@conf, 'mail_plugins') %>
-
-##
-## Mailbox handling optimizations
-##
-
-# Mailbox list indexes can be used to optimize IMAP STATUS commands. They are
-# also required for IMAP NOTIFY extension to be enabled.
-<%= DovecotCookbook::Conf.attribute(@conf, 'mailbox_list_index', 'no') %>
-
-# The minimum number of mails in a mailbox before updates are done to cache
-# file. This allows optimizing Dovecot's behavior to do less disk writes at
-# the cost of more disk reads.
-<%= DovecotCookbook::Conf.attribute(@conf, 'mail_cache_min_mail_count', 0) %>
-
-# When IDLE command is running, mailbox is checked once in a while to see if
-# there are any new mails or other changes. This setting defines the minimum
-# time to wait between those checks. Dovecot can also use inotify and
-# kqueue to find out immediately when changes occur.
-<%= DovecotCookbook::Conf.attribute(@conf, 'mailbox_idle_check_interval', '30 secs') %>
-
-# Save mails with CR+LF instead of plain LF. This makes sending those mails
-# take less CPU, especially with sendfile() syscall with Linux and FreeBSD.
-# But it also creates a bit more disk I/O which may just make it slower.
-# Also note that if other software reads the mboxes/maildirs, they may handle
-# the extra CRs wrong and cause problems.
-<%= DovecotCookbook::Conf.attribute(@conf, 'mail_save_crlf', false) %>
-
-# Max number of mails to keep open and prefetch to memory. This only works with
-# some mailbox formats and/or operating systems.
-<%= DovecotCookbook::Conf.attribute(@conf, 'mail_prefetch_count', 0) %>
-
-# How often to scan for stale temporary files and delete them (0 = never).
-# These should exist only after Dovecot dies in the middle of saving mails.
-<%= DovecotCookbook::Conf.attribute(@conf, 'mail_temp_scan_interval', '1w') %>
-
-##
-## Maildir-specific settings
-##
-
-# By default LIST command returns all entries in maildir beginning with a dot.
-# Enabling this option makes Dovecot return only entries which are directories.
-# This is done by stat()ing each entry, so it causes more disk I/O.
-# (For systems setting struct dirent->d_type, this check is free and it's
-# done always regardless of this setting)
-<%= DovecotCookbook::Conf.attribute(@conf, 'maildir_stat_dirs', false) %>
-
-# When copying a message, do it with hard links whenever possible. This makes
-# the performance much better, and it's unlikely to have any side effects.
-<%= DovecotCookbook::Conf.attribute(@conf, 'maildir_copy_with_hardlinks', true) %>
-
-# Assume Dovecot is the only MUA accessing Maildir: Scan cur/ directory only
-# when its mtime changes unexpectedly or when we can't find the mail otherwise.
-<%= DovecotCookbook::Conf.attribute(@conf, 'maildir_very_dirty_syncs', false) %>
-
-# If enabled, Dovecot doesn't use the S= in the Maildir filenames for
-# getting the mail's physical size, except when recalculating Maildir++ quota.
-# This can be useful in systems where a lot of the Maildir filenames have a
-# broken size. The performance hit for enabling this is very small.
-<%= DovecotCookbook::Conf.attribute(@conf, 'maildir_broken_filename_sizes', false) %>
-
-# Always move mails from new/ directory to cur/, even when the \Recent flags
-# aren't being reset.
-<%= DovecotCookbook::Conf.attribute(@conf, 'maildir_empty_new', false) %>
-
-##
-## mbox-specific settings
-##
-
-# Which locking methods to use for locking mbox. There are four available:
-# dotlock: Create .lock file. This is the oldest and most NFS-safe
-# solution. If you want to use /var/mail/ like directory, the users
-# will need write access to that directory.
-# dotlock_try: Same as dotlock, but if it fails because of permissions or
-# because there isn't enough disk space, just skip it.
-# fcntl : Use this if possible. Works with NFS too if lockd is used.
-# flock : May not exist in all systems. Doesn't work with NFS.
-# lockf : May not exist in all systems. Doesn't work with NFS.
-#
-# You can use multiple locking methods; if you do the order they're declared
-# in is important to avoid deadlocks if other MTAs/MUAs are using multiple
-# locking methods as well. Some operating systems don't allow using some of
-# them simultaneously.
-<%= DovecotCookbook::Conf.attribute(@conf, 'mbox_read_locks', 'fcntl') %>
-<%= DovecotCookbook::Conf.attribute(@conf, 'mbox_write_locks', 'dotlock fcntl') %>
-
-# Maximum time to wait for lock (all of them) before aborting.
-<%= DovecotCookbook::Conf.attribute(@conf, 'mbox_lock_timeout', '5 mins') %>
-
-# If dotlock exists but the mailbox isn't modified in any way, override the
-# lock file after this much time.
-<%= DovecotCookbook::Conf.attribute(@conf, 'mbox_dotlock_change_timeout', '2 mins') %>
-
-# When mbox changes unexpectedly we have to fully read it to find out what
-# changed. If the mbox is large this can take a long time. Since the change
-# is usually just a newly appended mail, it'd be faster to simply read the
-# new mails. If this setting is enabled, Dovecot does this but still safely
-# fallbacks to re-reading the whole mbox file whenever something in mbox isn't
-# how it's expected to be. The only real downside to this setting is that if
-# some other MUA changes message flags, Dovecot doesn't notice it immediately.
-# Note that a full sync is done with SELECT, EXAMINE, EXPUNGE and CHECK
-# commands.
-<%= DovecotCookbook::Conf.attribute(@conf, 'mbox_dirty_syncs', true) %>
-
-# Like mbox_dirty_syncs, but don't do full syncs even with SELECT, EXAMINE,
-# EXPUNGE or CHECK commands. If this is set, mbox_dirty_syncs is ignored.
-<%= DovecotCookbook::Conf.attribute(@conf, 'mbox_very_dirty_syncs', false) %>
-
-# Delay writing mbox headers until doing a full write sync (EXPUNGE and CHECK
-# commands and when closing the mailbox). This is especially useful for POP3
-# where clients often delete all mails. The downside is that our changes
-# aren't immediately visible to other MUAs.
-<%= DovecotCookbook::Conf.attribute(@conf, 'mbox_lazy_writes', true) %>
-
-# If mbox size is smaller than this (e.g. 100k), don't write index files.
-# If an index file already exists it's still read, just not updated.
-<%= DovecotCookbook::Conf.attribute(@conf, 'mbox_min_index_size', 0) %>
-
-# Mail header selection algorithm to use for MD5 POP3 UIDLs when
-# pop3_uidl_format=%m. For backwards compatibility we use apop3d inspired
-# algorithm, but it fails if the first Received: header isn't unique in all
-# mails. An alternative algorithm is "all" that selects all headers.
-<%= DovecotCookbook::Conf.attribute(@conf, 'mbox_md5', 'apop3d') %>
-
-##
-## mdbox-specific settings
-##
-
-# Maximum dbox file size until it's rotated.
-<%= DovecotCookbook::Conf.attribute(@conf, 'mdbox_rotate_size', '2M') %>
-
-# Maximum dbox file age until it's rotated. Typically in days. Day begins
-# from midnight, so 1d = today, 2d = yesterday, etc. 0 = check disabled.
-<%= DovecotCookbook::Conf.attribute(@conf, 'mdbox_rotate_interval', 0) %>
-
-# When creating new mdbox files, immediately preallocate their size to
-# mdbox_rotate_size. This setting currently works only in Linux with some
-# filesystems (ext4, xfs).
-<%= DovecotCookbook::Conf.attribute(@conf, 'mdbox_preallocate_space', false) %>
-
-##
-## Mail attachments
-##
-
-# sdbox and mdbox support saving mail attachments to external files, which
-# also allows single instance storage for them. Other backends don't support
-# this for now.
-
-# Directory root where to store mail attachments. Disabled, if empty.
-<%= DovecotCookbook::Conf.attribute(@conf, 'mail_attachment_dir') %>
-
-# Attachments smaller than this aren't saved externally. It's also possible to
-# write a plugin to disable saving specific attachments externally.
-<%= DovecotCookbook::Conf.attribute(@conf, 'mail_attachment_min_size', '128k') %>
-
-# Filesystem backend to use for saving attachments:
-# posix : No SiS done by Dovecot (but this might help FS's own deduplication)
-# sis posix : SiS with immediate byte-by-byte comparison during saving
-# sis-queue posix : SiS with delayed comparison and deduplication
-<%= DovecotCookbook::Conf.attribute(@conf, 'mail_attachment_fs', 'sis posix') %>
-
-# Hash format to use in attachment filenames. You can add any text and
-# variables: %{md4}, %{md5}, %{sha1}, %{sha256}, %{sha512}, %{size}.
-# Variables can be truncated, e.g. %{sha256:80} returns only first 80 bits
-<%= DovecotCookbook::Conf.attribute(@conf, 'mail_attachment_hash', '%{sha1}') %>
diff --git a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/10-master.conf.erb b/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/10-master.conf.erb
deleted file mode 100644
index 259cbcbdcae82152de843306da59177b6ab2a384..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/10-master.conf.erb
+++ /dev/null
@@ -1,160 +0,0 @@
-# Generated by Chef
-
-<%= DovecotCookbook::Conf.attribute(@conf, 'default_process_limit', 100) %>
-<%= DovecotCookbook::Conf.attribute(@conf, 'default_client_limit', 1000) %>
-
-# Default VSZ (virtual memory size) limit for service processes. This is mainly
-# intended to catch and kill processes that leak memory before they eat up
-# everything.
-<%= DovecotCookbook::Conf.attribute(@conf, 'default_vsz_limit', '256M') %>
-
-# Login user is internally used by login processes. This is the most untrusted
-# user in Dovecot system. It shouldn't have access to anything at all.
-<%= DovecotCookbook::Conf.attribute(@conf, 'default_login_user', 'dovenull') %>
-
-# Internal user is used by unprivileged processes. It should be separate from
-# login user, so that login processes can't disturb other processes.
-<%= DovecotCookbook::Conf.attribute(@conf, 'default_internal_user', 'dovecot') %>
-
-<% if @services['doveadm'].kind_of?(Hash) and @services['doveadm'].length > 0 -%>
-<%= DovecotCookbook::Conf.service('doveadm', @services['doveadm']) %>
-<% end -%>
-
-<% if @services['imap-login'].kind_of?(Hash) and @services['imap-login'].length > 0 -%>
-<%= DovecotCookbook::Conf.service('imap-login', @services['imap-login']) %>
-<% else -%>
-service imap-login {
- inet_listener imap {
- #port = 143
- }
- inet_listener imaps {
- #port = 993
- #ssl = yes
- }
-
- # Number of connections to handle before starting a new process. Typically
- # the only useful values are 0 (unlimited) or 1. 1 is more secure, but 0
- # is faster.
- #service_count = 1
-
- # Number of processes to always keep waiting for more connections.
- #process_min_avail = 0
-
- # If you set service_count=0, you probably need to grow this.
- #vsz_limit = $default_vsz_limit
-}
-<% end -%>
-
-<% if @services['pop3-login'].kind_of?(Hash) and @services['pop3-login'].length > 0 -%>
-<%= DovecotCookbook::Conf.service('pop3-login', @services['pop3-login']) %>
-<% else -%>
-service pop3-login {
- inet_listener pop3 {
- #port = 110
- }
- inet_listener pop3s {
- #port = 995
- #ssl = yes
- }
-}
-<% end -%>
-
-<% if @services['lmtp'].kind_of?(Hash) and @services['lmtp'].length > 0 -%>
-<%= DovecotCookbook::Conf.service('lmtp', @services['lmtp']) %>
-<% else -%>
-service lmtp {
- unix_listener lmtp {
- #mode = 0666
- }
-
- # Create inet listener only if you can't use the above UNIX socket
- #inet_listener lmtp {
- # Avoid making LMTP visible for the entire internet
- #address =
- #port =
- #}
-}
-<% end -%>
-
-<% if @services['imap'].kind_of?(Hash) and @services['imap'].length > 0 -%>
-<%= DovecotCookbook::Conf.service('imap', @services['imap']) %>
-<% else -%>
-service imap {
- # Most of the memory goes to mmap()ing files. You may need to increase this
- # limit if you have huge mailboxes.
- #vsz_limit = $default_vsz_limit
-
- # Max. number of IMAP processes (connections)
- #process_limit = 1024
-}
-<% end -%>
-
-<% if @services['pop3'].kind_of?(Hash) and @services['pop3'].length > 0 -%>
-<%= DovecotCookbook::Conf.service('pop3', @services['pop3']) %>
-<% else -%>
-service pop3 {
- # Max. number of POP3 processes (connections)
- #process_limit = 1024
-}
-<% end -%>
-
-<% if @services['auth'].kind_of?(Hash) and @services['auth'].length > 0 -%>
-<%= DovecotCookbook::Conf.service('auth', @services['auth']) %>
-<% else -%>
-service auth {
- # used by dovecot-lda, doveadm, possibly imap process, etc. Users that have
- # full permissions to this socket are able to get a list of all usernames and
- # get the results of everyone's userdb lookups.
- #
- # The default 0666 mode allows anyone to connect to the socket, but the
- # userdb lookups will succeed only if the userdb returns an "uid" field that
- # matches the caller process's UID. Also if caller's uid or gid matches the
- # socket's uid or gid the lookup succeeds. Anything else causes a failure.
- #
- # To give the caller full permissions to lookup all users, set the mode to
- # something else than 0666 and Dovecot lets the kernel enforce the
- # permissions (e.g. 0777 allows everyone full permissions).
- unix_listener auth-userdb {
- #mode = 0666
- #user =
- #group =
- }
-
- # Postfix smtp-auth
- #unix_listener /var/spool/postfix/private/auth {
- # mode = 0666
- #}
-
- # Auth process is run as this user.
- #user = $default_internal_user
-}
-<% end -%>
-
-<% if @services['auth-worker'].kind_of?(Hash) and @services['auth-worker'].length > 0 -%>
-<%= DovecotCookbook::Conf.service('auth-worker', @services['auth-worker']) %>
-<% else -%>
-service auth-worker {
- # Auth worker process is run as root by default, so that it can access
- # /etc/shadow. If this isn't necessary, the user should be changed to
- # $default_internal_user.
- #user = root
-}
-<% end -%>
-
-<% if @services['dict'].kind_of?(Hash) and @services['dict'].length > 0 -%>
-<%= DovecotCookbook::Conf.service('dict', @services['dict']) %>
-<% else -%>
-service dict {
- # If dict proxy is used, mail processes should have access to its socket.
- # For example: mode=0660, group=vmail and global mail_access_groups=vmail
- unix_listener dict {
- #mode = 0600
- #user =
- #group =
- }
-}
-<% end -%>
-
-<% if @services['anvil'].kind_of?(Hash) and @services['anvil'].length > 0 -%>
-<%= DovecotCookbook::Conf.service('anvil', @services['anvil']) %>
-<% end -%>
diff --git a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/10-ssl.conf.erb b/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/10-ssl.conf.erb
deleted file mode 100644
index 3946f57bca143988abdb2b51226dac7b08899669..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/10-ssl.conf.erb
+++ /dev/null
@@ -1,81 +0,0 @@
-# Generated by Chef
-
-##
-## SSL settings
-##
-
-# SSL/TLS support: yes, no, required.
-<%= DovecotCookbook::Conf.attribute(@conf, 'ssl', 'yes') %>
-
-# PEM encoded X.509 SSL/TLS certificate and private key. They're opened before
-# dropping root privileges, so keep the key file unreadable by anyone but
-# root. Included doc/mkcert.sh can be used to easily generate self-signed
-# certificate, just make sure to update the domains in dovecot-openssl.cnf
-<% case node['platform_family']
- when 'rhel', 'fedora', 'suse'
-     ssl_cert = '</etc/pki/dovecot/certs/dovecot.pem'
-     ssl_key = '</etc/pki/dovecot/private/dovecot.pem'
-   else
-     ssl_cert = '</etc/dovecot/dovecot.pem'
-     ssl_key = '</etc/dovecot/private/dovecot.pem'
-   end
-%>
-<%= DovecotCookbook::Conf.attribute(@conf, 'ssl_cert', ssl_cert) %>
-<%= DovecotCookbook::Conf.attribute(@conf, 'ssl_key', ssl_key) %>
-
-# If key file is password protected, give the password here. Alternatively
-# give it when starting dovecot with -p parameter. Since this file is often
-# world-readable, you may want to place this setting instead to a different
-# root owned 0600 file by using ssl_key_password = <path.
-<%= DovecotCookbook::Conf.attribute(@conf, 'ssl_key_password') %>
-
-# PEM encoded trusted certificate authority. Set this only if you intend to use
-# ssl_verify_client_cert=yes. The file should contain the CA certificate(s)
-# followed by the matching CRL(s). (e.g. ssl_ca = <%= ca_pem %>)
-<%= DovecotCookbook::Conf.attribute(@conf, 'ssl_ca') %>
-
-# Require that CRL check succeeds for client certificates.
-<%= DovecotCookbook::Conf.attribute(@conf, 'ssl_require_crl', true) %>
-
-# Directory and/or file for trusted SSL CA certificates. These are used only
-# when Dovecot needs to act as an SSL client (e.g. imapc backend). The
-# directory is usually /etc/ssl/certs in Debian-based systems and the file is
-# /etc/pki/tls/cert.pem in RedHat-based systems.
-<%= DovecotCookbook::Conf.attribute(@conf, 'ssl_client_ca_dir') %>
-<%= DovecotCookbook::Conf.attribute(@conf, 'ssl_client_ca_file') %>
-
-# Request client to send a certificate. If you also want to require it, set
-# auth_ssl_require_client_cert=yes in auth section.
-<%= DovecotCookbook::Conf.attribute(@conf, 'ssl_verify_client_cert', false) %>
-
-# Which field from certificate to use for username. commonName and
-# x500UniqueIdentifier are the usual choices. You'll also need to set
-# auth_ssl_username_from_cert=yes.
-<%= DovecotCookbook::Conf.attribute(@conf, 'ssl_cert_username_field', 'commonName') %>
-
-# How often to regenerate the SSL parameters file. Generation is quite CPU
-# intensive operation. The value is in hours, 0 disables regeneration
-# entirely.
-<%= DovecotCookbook::Conf.attribute(@conf, 'ssl_parameters_regenerate', 168) %>
-
-# DH parameters length to use.
-<%= DovecotCookbook::Conf.attribute(@conf, 'ssl_dh_parameters_length', 1024) %>
-
-# SSL protocols to use
-<%= DovecotCookbook::Conf.attribute(@conf, 'ssl_protocols', '!SSLv2') %>
-
-# SSL ciphers to use
-<%= DovecotCookbook::Conf.attribute(@conf, 'ssl_cipher_list', 'ALL:!LOW:!SSLv2:!EXP:!aNULL') %>
-
-# Prefer the server's order of ciphers over client's.
-<%= DovecotCookbook::Conf.attribute(@conf, 'ssl_prefer_server_ciphers', false) %>
-
-# SSL crypto device to use, for valid values run "openssl engine"
-<%= DovecotCookbook::Conf.attribute(@conf, 'ssl_crypto_device') %>
-
-# SSL extra options. Currently supported options are:
-# no_compression - Disable compression.
-<%= DovecotCookbook::Conf.attribute(@conf, 'ssl_options') %>
diff --git a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/10-tcpwrapper.conf.erb b/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/10-tcpwrapper.conf.erb
deleted file mode 100644
index 77d6c724076a4a0b4fa23c776176a3a2aa1d677b..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/10-tcpwrapper.conf.erb
+++ /dev/null
@@ -1,22 +0,0 @@
-# Generated by Chef
-
-# 10-tcpwrapper.conf
-#
-# service name for hosts.{allow|deny} are those defined as
-# inet_listener in master.conf
-#
-<% if @services['tcpwrap'].kind_of?(Hash) and @services['tcpwrap'].length > 0 -%>
-login_access_sockets = tcpwrap
-
-<%= DovecotCookbook::Conf.service('tcpwrap', @services['tcpwrap']) %>
-<% else -%>
-#login_access_sockets = tcpwrap
-#
-#service tcpwrap {
-# unix_listener login/tcpwrap {
-# group = $default_login_user
-# mode = 0600
-# user = $default_login_user
-# }
-#}
-<% end -%>
diff --git a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/15-lda.conf.erb b/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/15-lda.conf.erb
deleted file mode 100644
index 58fa20d3266df650dd306fe44fbf225eeaaa8e17..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/15-lda.conf.erb
+++ /dev/null
@@ -1,54 +0,0 @@
-# Generated by Chef
-
-##
-## LDA specific settings (also used by LMTP)
-##
-
-# Address to use when sending rejection mails.
-# Default is postmaster@<your domain>. %d expands to recipient domain.
-<%= DovecotCookbook::Conf.attribute(@conf, 'postmaster_address') %>
-
-# Hostname to use in various parts of sent mails (e.g. in Message-Id) and
-# in LMTP replies. Default is the system's real hostname@domain.
-<%= DovecotCookbook::Conf.attribute(@conf, 'hostname') %>
-
-# If user is over quota, return with temporary failure instead of
-# bouncing the mail.
-<%= DovecotCookbook::Conf.attribute(@conf, 'quota_full_tempfail', false) %>
-
-# Binary to use for sending mails.
-<%= DovecotCookbook::Conf.attribute(@conf, 'sendmail_path', '/usr/sbin/sendmail') %>
-
-# If non-empty, send mails via this SMTP host[:port] instead of sendmail.
-<%= DovecotCookbook::Conf.attribute(@conf, 'submission_host') %>
-
-# Subject: header to use for rejection mails. You can use the same variables
-# as for rejection_reason below.
-<%= DovecotCookbook::Conf.attribute(@conf, 'rejection_subject', 'Rejected: %s') %>
-
-# Human readable error message for rejection mails. You can use variables:
-# %n = CRLF, %r = reason, %s = original subject, %t = recipient
-<%= DovecotCookbook::Conf.attribute(@conf, 'rejection_reason', 'Your message to <%t> was automatically rejected:%n%r') %>
-
-# Delimiter character between local-part and detail in email address.
-<%= DovecotCookbook::Conf.attribute(@conf, 'recipient_delimiter', '+') %>
-
-# Header where the original recipient address (SMTP's RCPT TO: address) is taken
-# from if not available elsewhere. With dovecot-lda -a parameter overrides this.
-# A commonly used header for this is X-Original-To.
-<%= DovecotCookbook::Conf.attribute(@conf, 'lda_original_recipient_header') %>
-
-# Should saving a mail to a nonexistent mailbox automatically create it?
-<%= DovecotCookbook::Conf.attribute(@conf, 'lda_mailbox_autocreate', false) %>
-
-# Should automatically created mailboxes be also automatically subscribed?
-<%= DovecotCookbook::Conf.attribute(@conf, 'lda_mailbox_autosubscribe', false) %>
-
-<% if @protocols['lda'].kind_of?(Hash) and @protocols['lda'].length > 0 -%>
-<%= DovecotCookbook::Conf.protocol('lda', @protocols['lda']) %>
-<% else -%>
-protocol lda {
- # Space separated list of plugins to load (default is global mail_plugins).
- #mail_plugins = $mail_plugins
-}
-<% end -%>
diff --git a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/15-mailboxes.conf.erb b/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/15-mailboxes.conf.erb
deleted file mode 100644
index 9a8986bf99ad2ced71e996b1a14f899798b95b22..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/15-mailboxes.conf.erb
+++ /dev/null
@@ -1,80 +0,0 @@
-# Generated by Chef
-
-##
-## Mailbox definitions
-##
-
-# Each mailbox is specified in a separate mailbox section. The section name
-# specifies the mailbox name. If it has spaces, you can put the name
-# "in quotes". These sections can contain the following mailbox settings:
-#
-# auto:
-# Indicates whether the mailbox with this name is automatically created
-# implicitly when it is first accessed. The user can also be automatically
-# subscribed to the mailbox after creation. The following values are
-# defined for this setting:
-#
-# no - Never created automatically.
-# create - Automatically created, but no automatic subscription.
-# subscribe - Automatically created and subscribed.
-#
-# special_use:
-# A space-separated list of SPECIAL-USE flags (RFC 6154) to use for the
-# mailbox. There are no validity checks, so you could specify anything
-# you want in here, but it's not a good idea to use flags other than the
-# standard ones specified in the RFC:
-#
-# \All - This (virtual) mailbox presents all messages in the
-# user's message store.
-# \Archive - This mailbox is used to archive messages.
-# \Drafts - This mailbox is used to hold draft messages.
-# \Flagged - This (virtual) mailbox presents all messages in the
-# user's message store marked with the IMAP \Flagged flag.
-# \Junk - This mailbox is where messages deemed to be junk mail
-# are held.
-# \Sent - This mailbox is used to hold copies of messages that
-# have been sent.
-# \Trash - This mailbox is used to hold messages that have been
-# deleted.
-#
-# comment:
-# Defines a default comment or note associated with the mailbox. This
-# value is accessible through the IMAP METADATA mailbox entries
-# "/shared/comment" and "/private/comment". Users with sufficient
-# privileges can override the default value for entries with a custom
-# value.
-
-# NOTE: Assumes "namespace inbox" has been defined in 10-mail.conf.
-# namespace inbox {
-# # These mailboxes are widely used and could perhaps be created automatically:
-# mailbox Drafts {
-# special_use = \Drafts
-# }
-# mailbox Junk {
-# special_use = \Junk
-# }
-# mailbox Trash {
-# special_use = \Trash
-# }
-#
-# # For \Sent mailboxes there are two widely used names. We'll mark both of
-# # them as \Sent. User typically deletes one of them if duplicates are created.
-# mailbox Sent {
-# special_use = \Sent
-# }
-# mailbox "Sent Messages" {
-# special_use = \Sent
-# }
-#
-# # If you have a virtual "All messages" mailbox:
-# #mailbox virtual/All {
-# # special_use = \All
-# # comment = All my messages
-# #}
-#
-# # If you have a virtual "Flagged" mailbox:
-# #mailbox virtual/Flagged {
-# # special_use = \Flagged
-# # comment = All my flagged messages
-# #}
-# }
diff --git a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/15-replication.conf.erb b/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/15-replication.conf.erb
deleted file mode 100644
index 0724ff35a80b68e7f98c6852d30279bd2d221701..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/15-replication.conf.erb
+++ /dev/null
@@ -1,20 +0,0 @@
-# Generated by Chef
-
-# This file will contain replication settings in case of a replicated
-# master/master dovecot setup
-
-<% if @services['replicator'].kind_of?(Hash) and @services['replicator'].length > 0 -%>
-<%= DovecotCookbook::Conf.service('replicator', @services['replicator']) %>
-<% end -%>
-
-<% if @services['aggregator'].kind_of?(Hash) and @services['aggregator'].length > 0 -%>
-<%= DovecotCookbook::Conf.service('aggregator', @services['aggregator']) %>
-<% end -%>
-
-<% if @services['config'].kind_of?(Hash) and @services['config'].length > 0 -%>
-<%= DovecotCookbook::Conf.service('config', @services['config']) %>
-<% end -%>
-
-#Doveadm port and password for replication
-<%= DovecotCookbook::Conf.attribute(@conf, 'doveadm_port', 3333) %>
-<%= DovecotCookbook::Conf.attribute(@conf, 'doveadm_password', 'secret') %>
diff --git a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/20-imap.conf.erb b/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/20-imap.conf.erb
deleted file mode 100644
index 801e53ea5d9c2e9357bc8cf7db555e733aff8f37..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/20-imap.conf.erb
+++ /dev/null
@@ -1,81 +0,0 @@
-# Generated by Chef
-
-##
-## IMAP specific settings
-##
-
-<% if @protocols['imap'].kind_of?(Hash) and @protocols['imap'].length > 0 -%>
-<%= DovecotCookbook::Conf.protocol('imap', @protocols['imap']) %>
-<% else -%>
-protocol imap {
- # If nothing happens for this long while client is IDLEing, move the connection
- # to imap-hibernate process and close the old imap process. This saves memory,
- # because connections use very little memory in imap-hibernate process. The
- # downside is that recreating the imap process back uses some resources.
- #imap_hibernate_timeout = 0
-
- # Maximum IMAP command line length. Some clients generate very long command
- # lines with huge mailboxes, so you may need to raise this if you get
- # "Too long argument" or "IMAP command line too large" errors often.
- #imap_max_line_length = 64k
-
- # Maximum number of IMAP connections allowed for a user from each IP address.
- # NOTE: The username is compared case-sensitively.
- #mail_max_userip_connections = 10
-
- # Space separated list of plugins to load (default is global mail_plugins).
- #mail_plugins = $mail_plugins
-
- # IMAP logout format string:
- # %i - total number of bytes read from client
- # %o - total number of bytes sent to client
- # %{fetch_hdr_count} - Number of mails with mail header data sent to client
- # %{fetch_hdr_bytes} - Number of bytes with mail header data sent to client
- # %{fetch_body_count} - Number of mails with mail body data sent to client
- # %{fetch_body_bytes} - Number of bytes with mail body data sent to client
- # %{deleted} - Number of mails where client added \Deleted flag
- # %{expunged} - Number of mails that client expunged
- # %{trashed} - Number of mails that client copied/moved to the
- # special_use=\Trash mailbox.
- #imap_logout_format = bytes=%i/%o
-
- # Override the IMAP CAPABILITY response. If the value begins with '+',
- # add the given capabilities on top of the defaults (e.g. +XFOO XBAR).
- #imap_capability =
-
- # How long to wait between "OK Still here" notifications when client is
- # IDLEing.
- #imap_idle_notify_interval = 2 mins
-
- # ID field names and values to send to clients. Using * as the value makes
- # Dovecot use the default value. The following fields have default values
- # currently: name, version, os, os-version, support-url, support-email.
- #imap_id_send =
-
- # ID fields sent by client to log. * means everything.
- #imap_id_log =
-
- # Workarounds for various client bugs:
- # delay-newmail:
- # Send EXISTS/RECENT new mail notifications only when replying to NOOP
- # and CHECK commands. Some clients ignore them otherwise, for example OSX
- # Mail (<v2.1). Outlook Express breaks more badly though, without this it
- #     may show user "Message no longer in server" errors. Note that OE6 still
- #     breaks even with this workaround if synchronization is set to
- #     "Headers Only".
- # tb-extra-mailbox-sep:
- #     Thunderbird gets somehow confused with LAYOUT=fs (mbox and dbox) and
- #     adds extra '/' suffixes to mailbox names. This option causes Dovecot to
- #     ignore the extra '/' instead of treating it as invalid mailbox name.
- # tb-lsub-flags:
- #     Show \Noselect flags for LSUB replies with LAYOUT=fs (e.g. mbox).
- #     This makes Thunderbird realize they aren't selectable and show them
- #     greyed out, instead of only later giving "not selectable" popup error.
- #
- # The list is space-separated.
- #imap_client_workarounds =
-}
-<% end -%>
diff --git a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/20-lmtp.conf.erb b/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/20-lmtp.conf.erb
deleted file mode 100644
index 0c0d1619e4c8dd8a2cb9e48bd95ee4f1889f2e39..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/20-lmtp.conf.erb
+++ /dev/null
@@ -1,32 +0,0 @@
-# Generated by Chef
-
-##
-## LMTP specific settings
-##
-
-# Support proxying to other LMTP/SMTP servers by performing passdb lookups.
-<%= DovecotCookbook::Conf.attribute(@conf, 'lmtp_proxy', false) %>
-
-# When recipient address includes the detail (e.g. user+detail), try to save
-# the mail to the detail mailbox. See also recipient_delimiter and
-# lda_mailbox_autocreate settings.
-<%= DovecotCookbook::Conf.attribute(@conf, 'lmtp_save_to_detail_mailbox', false) %>
-
-# Verify quota before replying to RCPT TO. This adds a small overhead.
-<%= DovecotCookbook::Conf.attribute(@conf, 'lmtp_rcpt_check_quota', false) %>
-
-# Which recipient address to use for Delivered-To: header and Received:
-# header. The default is "final", which is the same as the one given to
-# RCPT TO command. "original" uses the address given in RCPT TO's ORCPT
-# parameter, "none" uses nothing. Note that "none" is currently always used
-# when a mail has multiple recipients.
-<%= DovecotCookbook::Conf.attribute(@conf, 'lmtp_hdr_delivery_address', 'final') %>
-
-<% if @protocols['lmtp'].kind_of?(Hash) and @protocols['lmtp'].length > 0 -%>
-<%= DovecotCookbook::Conf.protocol('lmtp', @protocols['lmtp']) %>
-<% else -%>
-protocol lmtp {
- # Space separated list of plugins to load (default is global mail_plugins).
- #mail_plugins = $mail_plugins
-}
-<% end -%>
diff --git a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/20-managesieve.conf.erb b/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/20-managesieve.conf.erb
deleted file mode 100644
index 4f367c8727f7d81899fa360090918cfe0a92e8d9..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/20-managesieve.conf.erb
+++ /dev/null
@@ -1,87 +0,0 @@
-# Generated by Chef
-
-##
-## ManageSieve specific settings
-##
-
-# Service definitions
-
-<% if @services['managesieve-login'].kind_of?(Hash) and @services['managesieve-login'].length > 0 -%>
-<%= DovecotCookbook::Conf.service('managesieve-login', @services['managesieve-login']) %>
-<% else -%>
-service managesieve-login {
- #inet_listener sieve {
- # port = 4190
- #}
-
- #inet_listener sieve_deprecated {
- # port = 2000
- #}
-
- # Number of connections to handle before starting a new process. Typically
- # the only useful values are 0 (unlimited) or 1. 1 is more secure, but 0
- # is faster.
- #service_count = 1
-
- # Number of processes to always keep waiting for more connections.
- #process_min_avail = 0
-
- # If you set service_count=0, you probably need to grow this.
- #vsz_limit = 64M
-}
-<% end -%>
-
-<% if @services['managesieve'].kind_of?(Hash) and @services['managesieve'].length > 0 -%>
-<%= DovecotCookbook::Conf.service('managesieve', @services['managesieve']) %>
-<% else -%>
-service managesieve {
- # Max. number of ManageSieve processes (connections)
- #process_count = 1024
-}
-<% end -%>
-
-# Service configuration
-
-<% if @protocols['sieve'].kind_of?(Hash) and @protocols['sieve'].length > 0 -%>
-<%= DovecotCookbook::Conf.protocol('sieve', @protocols['sieve']) %>
-<% else -%>
-protocol sieve {
- # Maximum ManageSieve command line length in bytes. ManageSieve usually does
- # not involve overly long command lines, so this setting will not normally
- # need adjustment
- #managesieve_max_line_length = 65536
-
- # Maximum number of ManageSieve connections allowed for a user from each IP
- # address.
- # NOTE: The username is compared case-sensitively.
- #mail_max_userip_connections = 10
-
- # Space separated list of plugins to load (none known to be useful so far).
- # Do NOT try to load IMAP plugins here.
- #mail_plugins =
-
- # MANAGESIEVE logout format string:
- # %i - total number of bytes read from client
- # %o - total number of bytes sent to client
- #managesieve_logout_format = bytes=%i/%o
-
- # To fool ManageSieve clients that are focused on CMU's timesieved you can
- # specify the IMPLEMENTATION capability that Dovecot reports to clients.
- # For example: 'Cyrus timsieved v2.2.13'
- #managesieve_implementation_string = Dovecot Pigeonhole
-
- # Explicitly specify the SIEVE and NOTIFY capability reported by the server
- # before login. If left unassigned these will be reported dynamically
- # according to what the Sieve interpreter supports by default (after login
- # this may differ depending on the user).
- #managesieve_sieve_capability =
- #managesieve_notify_capability =
-
- # The maximum number of compile errors that are returned to the client upon
- # script upload or script verification.
- #managesieve_max_compile_errors = 5
-
- # Refer to 90-sieve.conf for script quota configuration and configuration of
- # Sieve execution limits.
-}
-<% end -%>
diff --git a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/20-pop3.conf.erb b/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/20-pop3.conf.erb
deleted file mode 100644
index 10032c8be4ab1c47b30b1c05568914fc6fff1927..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/20-pop3.conf.erb
+++ /dev/null
@@ -1,105 +0,0 @@
-# Generated by Chef
-
-##
-## POP3 specific settings
-##
-
-<% if @protocols['pop3'].kind_of?(Hash) and @protocols['pop3'].length > 0 -%>
-<%= DovecotCookbook::Conf.protocol('pop3', @protocols['pop3']) %>
-<% else -%>
-protocol pop3 {
- # Don't try to set mails non-recent or seen with POP3 sessions. This is
- # mostly intended to reduce disk I/O. With maildir it doesn't move files
- # from new/ to cur/, with mbox it doesn't write Status-header.
- #pop3_no_flag_updates = no
-
- # Support LAST command which exists in old POP3 specs, but has been removed
- # from new ones. Some clients still wish to use this though. Enabling this
- # makes RSET command clear all \Seen flags from messages.
- #pop3_enable_last = no
-
- # If mail has X-UIDL header, use it as the mail's UIDL.
- #pop3_reuse_xuidl = no
-
- # Allow only one POP3 session to run simultaneously for the same user.
- #pop3_lock_session = no
-
- # POP3 requires message sizes to be listed as if they had CR+LF linefeeds.
- # Many POP3 servers violate this by returning the sizes with LF linefeeds,
- # because it's faster to get. When this setting is enabled, Dovecot still
- # tries to do the right thing first, but if that requires opening the
- # message, it falls back to the easier (but incorrect) size.
- #pop3_fast_size_lookups = no
-
- # POP3 UIDL (unique mail identifier) format to use. You can use following
- # variables, along with the variable modifiers described in
- # doc/wiki/Variables.txt (e.g. %Uf for the filename in uppercase)
- #
- # %v - Mailbox's IMAP UIDVALIDITY
- # %u - Mail's IMAP UID
- # %m - MD5 sum of the mailbox headers in hex (mbox only)
- # %f - filename (maildir only)
- # %g - Mail's GUID
- #
- # If you want UIDL compatibility with other POP3 servers, use:
- # UW's ipop3d : %08Xv%08Xu
- # Courier : %f or %v-%u (both might be used simultaneously)
- # Cyrus (<= 2.1.3) : %u
- # Cyrus (>= 2.1.4) : %v.%u
- # Dovecot v0.99.x : %v.%u
- # tpop3d : %Mf
- #
- # Note that Outlook 2003 seems to have problems with %v.%u format which was
- # Dovecot's default, so if you're building a new server it would be a good
- # idea to change this. %08Xu%08Xv should be pretty fail-safe.
- #
- #pop3_uidl_format = %08Xu%08Xv
-
- # Permanently save UIDLs sent to POP3 clients, so pop3_uidl_format changes
- # won't change those UIDLs. Currently this works only with Maildir.
- #pop3_save_uidl = no
-
- # What to do about duplicate UIDLs if they exist?
- # allow: Show duplicates to clients.
- # rename: Append a temporary -2, -3, etc. counter after the UIDL.
- #pop3_uidl_duplicates = allow
-
- # This option changes POP3 behavior so that it's not possible to actually
- # delete mails via POP3, only hide them from future POP3 sessions. The mails
- # will still be counted towards user's quota until actually deleted via IMAP.
- # Use e.g. "$POP3Deleted" as the value (it will be visible as IMAP keyword).
- # Make sure you can legally archive mails before enabling this setting.
- #pop3_deleted_flag =
-
- # POP3 logout format string:
- # %i - total number of bytes read from client
- # %o - total number of bytes sent to client
- # %t - number of TOP commands
- # %p - number of bytes sent to client as a result of TOP command
- # %r - number of RETR commands
- # %b - number of bytes sent to client as a result of RETR command
- # %d - number of deleted messages
- # %{deleted_bytes} - number of bytes in deleted messages
- # %m - number of messages (before deletion)
- # %s - mailbox size in bytes (before deletion)
- # %u - old/new UIDL hash. may help finding out if UIDLs changed unexpectedly
- #pop3_logout_format = top=%t/%p, retr=%r/%b, del=%d/%m, size=%s
-
- # Maximum number of POP3 connections allowed for a user from each IP address.
- # NOTE: The username is compared case-sensitively.
- #mail_max_userip_connections = 10
-
- # Space separated list of plugins to load (default is global mail_plugins).
- #mail_plugins = $mail_plugins
-
- # Workarounds for various client bugs:
- # outlook-no-nuls:
- # Outlook and Outlook Express hang if mails contain NUL characters.
- # This setting replaces them with 0x80 character.
- # oe-ns-eoh:
- # Outlook Express and Netscape Mail break if the end-of-headers line is
- # missing. This option simply sends it if it's missing.
- # The list is space-separated.
- #pop3_client_workarounds =
-}
-<% end -%>
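
As a quick aside on the `%08Xu%08Xv` format recommended above, the UIDL is just the IMAP UID and UIDVALIDITY rendered as zero-padded uppercase hex and concatenated. A minimal Ruby illustration (the UID and UIDVALIDITY values are arbitrary):

```ruby
# Illustrative only: the UIDL produced by pop3_uidl_format = %08Xu%08Xv for a
# message with IMAP UID 42 in a mailbox whose UIDVALIDITY is 1234567890.
uid         = 42
uidvalidity = 1_234_567_890

puts format('%08X%08X', uid, uidvalidity)  # => "0000002A499602D2"
```
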
diff --git a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/90-acl.conf.erb b/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/90-acl.conf.erb
deleted file mode 100644
index 0175b7b810222de2f590a7bbc24950a536185b66..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/90-acl.conf.erb
+++ /dev/null
@@ -1,25 +0,0 @@
-# Generated by Chef
-
-##
-## Mailbox access control lists.
-##
-
-# vfile backend reads ACLs from "dovecot-acl" file from mail directory.
-# You can also optionally give a global ACL directory path where ACLs are
-# applied to all users' mailboxes. The global ACL directory contains
-# one file for each mailbox, eg. INBOX or sub.mailbox. cache_secs parameter
-# specifies how many seconds to wait between stat()ing dovecot-acl file
-# to see if it changed.
-<% if @plugins.has_key?('acl') and @plugins['acl'].kind_of?(Hash) -%>
-<%= DovecotCookbook::Conf.plugin('acl', @plugins['acl']) %>
-<% else -%>
-plugin {
- #acl = vfile:/etc/dovecot/global-acls:cache_secs=300
-}
-
-# To let users LIST mailboxes shared by other users, Dovecot needs a
-# shared mailbox dictionary. For example:
-plugin {
- #acl_shared_dict = file:/var/lib/dovecot/shared-mailboxes
-}
-<% end -%>
diff --git a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/90-plugin.conf.erb b/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/90-plugin.conf.erb
deleted file mode 100644
index d717d27bf82acdce922c8631519f24634d50dab4..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/90-plugin.conf.erb
+++ /dev/null
@@ -1,23 +0,0 @@
-# Generated by Chef
-
-##
-## Plugin settings
-##
-
-# All wanted plugins must be listed in mail_plugins setting before any of the
-# settings take effect. See <doc/wiki/Plugins.txt> for a list of plugins and
-# their configuration. Note that %variable expansion is done for all values.
-
-<% unknown_plugins = DovecotCookbook::Plugins.list_unknown(@plugins)
- if unknown_plugins.length > 0
- unknown_plugins.each do |plugin|
- if @plugins[plugin].kind_of?(Hash)
- -%>
-<%= DovecotCookbook::Conf.plugin(plugin, @plugins[plugin]) %>
-<% end
- end
- else -%>
-plugin {
- #setting_name = value
-}
-<% end -%>
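
To make the control flow of loops like the one above easier to follow, here is a self-contained Ruby sketch that renders a comparable ERB fragment. The `DovecotCookbook::Conf.plugin` stub is purely hypothetical, since the real helper is not part of this diff; it exists only so the snippet runs.

```ruby
require 'erb'

module DovecotCookbook
  module Conf
    # Hypothetical stand-in for the cookbook's Conf.plugin helper.
    def self.plugin(name, settings)
      body = settings.map { |k, v| "  #{k} = #{v}" }.join("\n")
      "# plugin: #{name}\nplugin {\n#{body}\n}"
    end
  end
end

plugins = { 'zlib' => { 'zlib_save' => 'gz', 'zlib_save_level' => 6 } }

template = <<~ERB
  <% plugins.each do |name, settings| -%>
  <%= DovecotCookbook::Conf.plugin(name, settings) %>
  <% end -%>
ERB

puts ERB.new(template, trim_mode: '-').result(binding)
```
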
diff --git a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/90-quota.conf.erb b/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/90-quota.conf.erb
deleted file mode 100644
index e5631c3fca3d66537a27fab0cea040f88e550083..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/90-quota.conf.erb
+++ /dev/null
@@ -1,95 +0,0 @@
-# Generated by Chef
-
-##
-## Quota configuration.
-##
-
-# Note that you also have to enable quota plugin in mail_plugins setting.
-#
-
-<% if [@plugins['quota'], @services['quota-status'], @services['quota-warning']].any? {|p| !p.nil?} -%>
- <% unless @plugins['quota'].nil? or !@plugins['quota'].kind_of?(Hash) or @plugins['quota'].empty? -%>
-<%= DovecotCookbook::Conf.plugin('quota', @plugins['quota']) %>
- <% end -%>
- <% unless @services['quota-status'].nil? or !@services['quota-status'].kind_of?(Hash) or @services['quota-status'].empty? -%>
-<%= DovecotCookbook::Conf.service('quota-status', @services['quota-status']) %>
- <% end -%>
- <% unless @services['quota-warning'].nil? or !@services['quota-warning'].kind_of?(Hash) or @services['quota-warning'].empty? -%>
-<%= DovecotCookbook::Conf.service('quota-warning', @services['quota-warning']) %>
- <% end -%>
-<% else -%>
-
-##
-## Quota limits
-##
-
-# Quota limits are set using "quota_rule" parameters. To get per-user quota
-# limits, you can set/override them by returning "quota_rule" extra field
-# from userdb. It's also possible to give mailbox-specific limits, for example
-# to give additional 100 MB when saving to Trash:
-
-plugin {
- #quota_rule = *:storage=1G
- #quota_rule2 = Trash:storage=+100M
-
- # LDA/LMTP allows saving the last mail to bring user from under quota to
- # over quota, if the quota doesn't grow too high. Default is to allow as
- # long as quota will stay under 10% above the limit. Also allowed e.g. 10M.
- #quota_grace = 10%%
-}
-
-##
-## Quota warnings
-##
-
-# You can execute a given command when user exceeds a specified quota limit.
-# Each quota root has separate limits. Only the command for the first
- # exceeded limit is executed, so put the highest limit first.
-# The commands are executed via script service by connecting to the named
-# UNIX socket (quota-warning below).
-# Note that % needs to be escaped as %%, otherwise "% " expands to empty.
-
-plugin {
- #quota_warning = storage=95%% quota-warning 95 %u
- #quota_warning2 = storage=80%% quota-warning 80 %u
-}
-
-# Example quota-warning service. The unix listener's permissions should be
-# set in a way that mail processes can connect to it. Below example assumes
-# that mail processes run as vmail user. If you use mode=0666, all system users
-# can generate quota warnings to anyone.
-#service quota-warning {
-# executable = script /usr/local/bin/quota-warning.sh
-# user = dovecot
-# unix_listener quota-warning {
-# user = vmail
-# }
-#}
-
-##
-## Quota backends
-##
-
-# Multiple backends are supported:
-# dirsize: Find and sum all the files found from mail directory.
-# Extremely SLOW with Maildir. It'll eat your CPU and disk I/O.
-# dict: Keep quota stored in dictionary (eg. SQL)
-# maildir: Maildir++ quota
-# fs: Read-only support for filesystem quota
-
-plugin {
- #quota = dirsize:User quota
- #quota = maildir:User quota
- #quota = dict:User quota::proxy::quota
- #quota = fs:User quota
-}
-
-# Multiple quota roots are also possible, for example this gives each user
-# their own 100MB quota and one shared 1GB quota within the domain:
-plugin {
- #quota = dict:user::proxy::quota
- #quota2 = dict:domain:%d:proxy::quota_domain
- #quota_rule = *:storage=102400
- #quota2_rule = *:storage=1048576
-}
-<% end -%>
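
The commented-out `service quota-warning` block above expects an external script that receives whatever arguments the matching `quota_warning` line passes (here the percentage and `%u`). Below is a rough Ruby stand-in for such a script; the `dovecot-lda` path, the sender address, and the `noenforcing` override are assumptions and would need adjusting for a real deployment.

```ruby
#!/usr/bin/env ruby
# Hypothetical quota warning handler, invoked by Dovecot's script service with
# the arguments from quota_warning (e.g. "95 %u" -> percentage, username).
percent, user = ARGV

message = <<~MAIL
  From: postmaster@example.com
  Subject: Mailbox quota warning

  Your mailbox is now #{percent}% full.
MAIL

# Deliver the warning back to the user; "noenforcing" keeps the warning mail
# itself from being rejected because the user is already near quota.
IO.popen(['/usr/libexec/dovecot/dovecot-lda', '-d', user,
          '-o', 'plugin/quota=maildir:User quota:noenforcing'], 'w') do |lda|
  lda.write(message)
end
```
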
diff --git a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/90-sieve.conf.erb b/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/90-sieve.conf.erb
deleted file mode 100644
index f68fd62cd3854b4315b76109943b206cdfcad463..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/90-sieve.conf.erb
+++ /dev/null
@@ -1,89 +0,0 @@
-# Generated by Chef
-
-##
-## Settings for the Sieve interpreter
-##
-
-# Do not forget to enable the Sieve plugin in 15-lda.conf and 20-lmtp.conf
-# by adding it to the respective mail_plugins= settings.
-
-<% if @plugins.has_key?('sieve') and @plugins['sieve'].kind_of?(Hash) -%>
-<%= DovecotCookbook::Conf.plugin('sieve', @plugins['sieve']) %>
-<% else -%>
-plugin {
- # The path to the user's main active script. If ManageSieve is used, this is the
- # location of the symbolic link controlled by ManageSieve.
- #sieve = ~/.dovecot.sieve
-
- # The default Sieve script when the user has none. This is a path to a global
- # sieve script file, which gets executed ONLY if user's private Sieve script
- # doesn't exist. Be sure to pre-compile this script manually using the sievec
- # command line tool.
- # --> See sieve_before for executing scripts before the user's personal
- # script.
- #sieve_global_path = /var/lib/dovecot/sieve/default.sieve
-
- # Directory for :personal include scripts for the include extension. This
- # is also where the ManageSieve service stores the user's scripts.
- #sieve_dir = ~/sieve
-
- # Directory for :global include scripts for the include extension.
- #sieve_global_dir =
-
- # Path to a script file or a directory containing script files that need to be
- # executed before the user's script. If the path points to a directory, all
- # the Sieve scripts contained therein (with the proper .sieve extension) are
- # executed. The order of execution is determined by the file names, using a
- # normal 8bit per-character comparison.
- #sieve_before =
-
- # Identical to sieve_before, only the specified scripts are executed after the
- # user's script (only when keep is still in effect!).
- #sieve_after =
-
- # Which Sieve language extensions are available to users. By default, all
- # supported extensions are available, except for deprecated extensions or
- # those that are still under development. Some system administrators may want
- # to disable certain Sieve extensions or enable those that are not available
- # by default. This setting can use '+' and '-' to specify differences relative
- # to the default. For example `sieve_extensions = +imapflags' will enable the
- # deprecated imapflags extension in addition to all extensions that were
- # already enabled by default.
- #sieve_extensions = +notify +imapflags
-
- # The Pigeonhole Sieve interpreter can have plugins of its own. Using this
- # setting, the used plugins can be specified. Check the Dovecot wiki
- # (wiki2.dovecot.org) or the pigeonhole website
- # (http://pigeonhole.dovecot.org) for available plugins.
- #sieve_plugins =
-
- # The separator that is expected between the :user and :detail
- # address parts introduced by the subaddress extension. This may
- # also be a sequence of characters (e.g. '--'). The current
- # implementation looks for the separator from the left of the
- # localpart and uses the first one encountered. The :user part is
- # left of the separator and the :detail part is right. This setting
- # is also used by Dovecot's LMTP service.
- #recipient_delimiter = +
-
- # The maximum size of a Sieve script. The compiler will refuse to
- # compile any script larger than this limit.
- #sieve_max_script_size = 1M
-
- # The maximum number of actions that can be performed during a single
- # script execution.
- #sieve_max_actions = 32
-
- # The maximum number of redirect actions that can be performed during
- # a single script execution.
- #sieve_max_redirects = 4
-
- # The maximum number of personal Sieve scripts a single user can have.
- # (Currently only relevant for ManageSieve)
- #sieve_quota_max_scripts = 0
-
- # The maximum amount of disk storage a single user's scripts may occupy.
- # (Currently only relevant for ManageSieve)
- #sieve_quota_max_storage = 0
-}
-<% end -%>
diff --git a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/auth-checkpassword.conf.ext.erb b/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/auth-checkpassword.conf.ext.erb
deleted file mode 100644
index 4305d33c9d982e234cf909e3c062938b2be73794..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/auth-checkpassword.conf.ext.erb
+++ /dev/null
@@ -1,31 +0,0 @@
-# Generated by Chef
-
-# Authentication for checkpassword users. Included from 10-auth.conf.
-#
-#
-
-<% if DovecotCookbook::Auth.passdb?(@auth['checkpassword']) -%>
-<%= DovecotCookbook::Conf.authdb('checkpassword', 'passdb', @auth['checkpassword']['passdb']) %>
-<% else -%>
-passdb {
- driver = checkpassword
- args = /usr/bin/checkpassword
-}
-<% end -%>
-
-<% if DovecotCookbook::Auth.userdb?(@auth['checkpassword']) -%>
-<%= DovecotCookbook::Conf.authdb('checkpassword', 'userdb', @auth['checkpassword']['userdb']) %>
-<% else -%>
-# passdb lookup should return also userdb info
-userdb {
- driver = prefetch
-}
-<% end -%>
-
-# Standard checkpassword doesn't support direct userdb lookups.
-# If you need checkpassword userdb, the checkpassword must support
-# Dovecot-specific extensions.
-#userdb {
-# driver = checkpassword
-# args = /usr/bin/checkpassword
-#}
diff --git a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/auth-deny.conf.ext.erb b/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/auth-deny.conf.ext.erb
deleted file mode 100644
index 45095f5439529f3126c539795e015ee38435702c..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/auth-deny.conf.ext.erb
+++ /dev/null
@@ -1,21 +0,0 @@
-# Generated by Chef
-
-# Deny access for users. Included from 10-auth.conf.
-
-# Users can be (temporarily) disabled by adding a passdb with deny=yes.
-# If the user is found from that database, authentication will fail.
-# The deny passdb should always be specified before others, so it gets
-# checked first.
-
-<% if DovecotCookbook::Auth.passdb?(@auth['deny']) -%>
-<%= DovecotCookbook::Conf.authdb('passwd-file', 'passdb', @auth['deny']['passdb']) %>
-<% else -%>
-# Example deny passdb using passwd-file. You can use any passdb though.
-passdb {
- driver = passwd-file
- deny = yes
-
- # File contains a list of usernames, one per line
- args = /etc/dovecot/deny-users
-}
-<% end -%>
diff --git a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/auth-dict.conf.ext.erb b/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/auth-dict.conf.ext.erb
deleted file mode 100644
index f3acc1e24ab606450e941f5aee00bfdd6c729b83..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/auth-dict.conf.ext.erb
+++ /dev/null
@@ -1,26 +0,0 @@
-# Generated by Chef
-
-# Authentication via dict backend. Included from 10-auth.conf.
-#
-#
-
-<% if DovecotCookbook::Auth.passdb?(@auth['dict']) -%>
-<%= DovecotCookbook::Conf.authdb('dict', 'passdb', @auth['dict']['passdb']) %>
-<% else -%>
-passdb {
- driver = dict
-
- # Path for dict configuration file, see
- # example-config/dovecot-dict-auth.conf.ext
- args = /etc/dovecot/dovecot-dict-auth.conf.ext
-}
-<% end -%>
-
-<% if DovecotCookbook::Auth.userdb?(@auth['dict']) -%>
-<%= DovecotCookbook::Conf.authdb('dict', 'userdb', @auth['dict']['userdb']) %>
-<% else -%>
-userdb {
- driver = dict
- args = /etc/dovecot/dovecot-dict-auth.conf.ext
-}
-<% end -%>
diff --git a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/auth-ldap.conf.ext.erb b/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/auth-ldap.conf.ext.erb
deleted file mode 100644
index d10225be12015a2d0f6215417fc78a3cec2c2372..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/auth-ldap.conf.ext.erb
+++ /dev/null
@@ -1,43 +0,0 @@
-# Generated by Chef
-
-# Authentication for LDAP users. Included from 10-auth.conf.
-#
-#
-
-<% if DovecotCookbook::Auth.passdb?(@auth['ldap']) -%>
-<%= DovecotCookbook::Conf.authdb('ldap', 'passdb', @auth['ldap']['passdb']) %>
-<% else -%>
-passdb {
- driver = ldap
-
- # Path for LDAP configuration file, see example-config/dovecot-ldap.conf.ext
- args = /etc/dovecot/dovecot-ldap.conf.ext
-}
-<% end -%>
-
-<% if DovecotCookbook::Auth.userdb?(@auth['ldap']) -%>
-<%= DovecotCookbook::Conf.authdb('ldap', 'userdb', @auth['ldap']['userdb']) %>
-<% else -%>
-# "prefetch" user database means that the passdb already provided the
-# needed information and there's no need to do a separate userdb lookup.
-#
-#userdb {
-# driver = prefetch
-#}
-
-userdb {
- driver = ldap
- args = /etc/dovecot/dovecot-ldap.conf.ext
-
- # Default fields can be used to specify defaults that LDAP may override
- #default_fields = home=/home/virtual/%u
-}
-
-# If you don't have any user-specific settings, you can avoid the userdb LDAP
-# lookup by using userdb static instead of userdb ldap, for example:
-#
-#userdb {
- #driver = static
- #args = uid=vmail gid=vmail home=/var/vmail/%u
-#}
-<% end -%>
diff --git a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/auth-master.conf.ext.erb b/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/auth-master.conf.ext.erb
deleted file mode 100644
index 3aed0840fb73cc890d3cb564de8d4496c523c75f..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/auth-master.conf.ext.erb
+++ /dev/null
@@ -1,22 +0,0 @@
-# Generated by Chef
-
-# Authentication for master users. Included from 10-auth.conf.
-
-# By adding master=yes setting inside a passdb you make the passdb a list
-# of "master users", who can log in as anyone else.
-#
-
-<% if DovecotCookbook::Auth.passdb?(@auth['master']) -%>
-<%= DovecotCookbook::Conf.authdb('passwd-file', 'passdb', @auth['master']['passdb']) %>
-<% else -%>
-# Example master user passdb using passwd-file. You can use any passdb though.
-passdb {
- driver = passwd-file
- master = yes
- args = /etc/dovecot/master-users
-
- # Unless you're using PAM, you probably still want the destination user to
- # be looked up from passdb that it really exists. pass=yes does that.
- pass = yes
-}
-<% end -%>
diff --git a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/auth-passwdfile.conf.ext.erb b/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/auth-passwdfile.conf.ext.erb
deleted file mode 100644
index c76d50d302ce0bfc12e521361d5394f2d3a7fae9..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/auth-passwdfile.conf.ext.erb
+++ /dev/null
@@ -1,30 +0,0 @@
-# Generated by Chef
-
-# Authentication for passwd-file users. Included from 10-auth.conf.
-#
-# passwd-like file with specified location.
-#
-
-<% if DovecotCookbook::Auth.passdb?(@auth['passwdfile']) -%>
-<%= DovecotCookbook::Conf.authdb('passwd-file', 'passdb', @auth['passwdfile']['passdb']) %>
-<% else -%>
-passdb {
- driver = passwd-file
- args = scheme=CRYPT username_format=%u /etc/dovecot/users
-}
-<% end -%>
-
-<% if DovecotCookbook::Auth.userdb?(@auth['passwdfile']) -%>
-<%= DovecotCookbook::Conf.authdb('passwd-file', 'userdb', @auth['passwdfile']['userdb']) %>
-<% else -%>
-userdb {
- driver = passwd-file
- args = username_format=%u /etc/dovecot/users
-
- # Default fields that can be overridden by passwd-file
- #default_fields = quota_rule=*:storage=1G
-
- # Override fields from passwd-file
- #override_fields = home=/home/virtual/%u
-}
-<% end -%>
diff --git a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/auth-sql.conf.ext.erb b/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/auth-sql.conf.ext.erb
deleted file mode 100644
index ae51bcd7a0eeeeb167de9ec4a975bae00e231204..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/auth-sql.conf.ext.erb
+++ /dev/null
@@ -1,40 +0,0 @@
-# Generated by Chef
-
-# Authentication for SQL users. Included from 10-auth.conf.
-#
-#
-
-<% if DovecotCookbook::Auth.passdb?(@auth['sql']) -%>
-<%= DovecotCookbook::Conf.authdb('sql', 'passdb', @auth['sql']['passdb']) %>
-<% else -%>
-passdb {
- driver = sql
-
- # Path for SQL configuration file, see example-config/dovecot-sql.conf.ext
- args = /etc/dovecot/dovecot-sql.conf.ext
-}
-<% end -%>
-
-<% if DovecotCookbook::Auth.userdb?(@auth['sql']) -%>
-<%= DovecotCookbook::Conf.authdb('sql', 'userdb', @auth['sql']['userdb']) %>
-<% else -%>
-# "prefetch" user database means that the passdb already provided the
-# needed information and there's no need to do a separate userdb lookup.
-#
-#userdb {
-# driver = prefetch
-#}
-
-userdb {
- driver = sql
- args = /etc/dovecot/dovecot-sql.conf.ext
-}
-
-# If you don't have any user-specific settings, you can avoid the user_query
-# by using userdb static instead of userdb sql, for example:
-#
-#userdb {
- #driver = static
- #args = uid=vmail gid=vmail home=/var/vmail/%u
-#}
-<% end -%>
diff --git a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/auth-static.conf.ext.erb b/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/auth-static.conf.ext.erb
deleted file mode 100644
index a2c36f27c3429baa48fd1c90d940f6c1ed6fe6fa..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/auth-static.conf.ext.erb
+++ /dev/null
@@ -1,34 +0,0 @@
-# Generated by Chef
-
-# Static passdb. Included from 10-auth.conf.
-
-# This can be used for situations where Dovecot doesn't need to verify the
-# username or the password, or if there is a single password for all users:
-#
-# - proxy frontend, where the backend verifies the password
-# - proxy backend, where the frontend already verified the password
-# - authentication with SSL certificates
-# - simple testing
-
-<% if DovecotCookbook::Auth.passdb?(@auth['static']) -%>
-<%= DovecotCookbook::Conf.authdb('static', 'passdb', @auth['static']['passdb']) %>
-<% else -%>
-#passdb {
-# driver = static
-# args = proxy=y host=%1Mu.example.com nopassword=y
-#}
-
-#passdb {
-# driver = static
-# args = password=test
-#}
-<% end -%>
-
-<% if DovecotCookbook::Auth.userdb?(@auth['static']) -%>
-<%= DovecotCookbook::Conf.authdb('static', 'userdb', @auth['static']['userdb']) %>
-<% else -%>
-#userdb {
-# driver = static
-# args = uid=vmail gid=vmail home=/home/%u
-#}
-<% end -%>
diff --git a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/auth-system.conf.ext.erb b/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/auth-system.conf.ext.erb
deleted file mode 100644
index 16394ee3530b67781eaa641bab1dbefe7ce56cf2..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/auth-system.conf.ext.erb
+++ /dev/null
@@ -1,84 +0,0 @@
-# Generated by Chef
-
-# Authentication for system users. Included from 10-auth.conf.
-#
-#
-#
-
-# PAM authentication. Preferred nowadays by most systems.
-# PAM is typically used with either userdb passwd or userdb static.
-# REMEMBER: You'll need /etc/pam.d/dovecot file created for PAM
-# authentication to actually work.
-<% if DovecotCookbook::Auth.passdb?(@auth['system']) -%>
-<%= DovecotCookbook::Conf.authdb('pam', 'passdb', @auth['system']['passdb']) %>
-<% else -%>
-passdb {
- driver = pam
- # [session=yes] [setcred=yes] [failure_show_msg=yes] [max_requests=<n>]
- # [cache_key=<key>] [<service name>]
- #args = dovecot
-}
-
-# System users (NSS, /etc/passwd, or similar).
-# In many systems nowadays this uses Name Service Switch, which is
-# configured in /etc/nsswitch.conf.
-#passdb {
- #driver = passwd
- # [blocking=no]
- #args =
-#}
-
-# Shadow passwords for system users (NSS, /etc/shadow or similar).
-# Deprecated by PAM nowadays.
-#
-#passdb {
- #driver = shadow
- # [blocking=no]
- #args =
-#}
-
-# PAM-like authentication for OpenBSD.
-#
-#passdb {
- #driver = bsdauth
- # [blocking=no] [cache_key=<key>]
- #args =
-#}
-<% end -%>
-
-##
-## User databases
-##
-
-# System users (NSS, /etc/passwd, or similar). In many systems nowadays this
-# uses Name Service Switch, which is configured in /etc/nsswitch.conf.
-<% if DovecotCookbook::Auth.userdb?(@auth['system']) -%>
-<%= DovecotCookbook::Conf.authdb('passwd', 'userdb', @auth['system']['userdb']) %>
-<% else -%>
-userdb {
- #
- driver = passwd
- # [blocking=no]
- #args =
-
- # Override fields from passwd
- #override_fields = home=/home/virtual/%u
-}
-
-# Static settings generated from template
-#userdb {
- #driver = static
- # Can return anything a userdb could normally return. For example:
- #
- # args = uid=500 gid=500 home=/var/mail/%u
- #
- # LDA and LMTP needs to look up users only from the userdb. This of course
- # doesn't work with static userdb because there is no list of users.
- # Normally static userdb handles this by doing a passdb lookup. This works
- # with most passdbs, with PAM being the most notable exception. If you do
- # the user verification another way, you can add allow_all_users=yes to
- # the args in which case the passdb lookup is skipped.
- #
- #args =
-#}
-<% end -%>
diff --git a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/auth-vpopmail.conf.ext.erb b/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/auth-vpopmail.conf.ext.erb
deleted file mode 100644
index ddaf63547b72b621527d55d260a338606ace39ed..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/templates/default/conf.d/auth-vpopmail.conf.ext.erb
+++ /dev/null
@@ -1,27 +0,0 @@
-# Generated by Chef
-
-# Authentication for vpopmail users. Included from 10-auth.conf.
-#
-#
-
-<% if DovecotCookbook::Auth.passdb?(@auth['vpopmail']) -%>
-<%= DovecotCookbook::Conf.authdb('vpopmail', 'passdb', @auth['vpopmail']['passdb']) %>
-<% else -%>
-passdb {
- driver = vpopmail
-
- # [cache_key=<key>] [webmail=<ip>]
- args =
-}
-<% end -%>
-
-<% if DovecotCookbook::Auth.userdb?(@auth['vpopmail']) -%>
-<%= DovecotCookbook::Conf.authdb('vpopmail', 'userdb', @auth['vpopmail']['userdb']) %>
-<% else -%>
-userdb {
- driver = vpopmail
-
- # [quota_template=<template>] - %q expands to Maildir++ quota
- args = quota_template=quota_rule=*:backend=%q
-}
-<% end -%>
diff --git a/lc-gdn-chef/cookbooks/dovecot/templates/default/dovecot-db.conf.ext.erb b/lc-gdn-chef/cookbooks/dovecot/templates/default/dovecot-db.conf.ext.erb
deleted file mode 100644
index c10e5739726d8d7d9ce546cb4cfecd44b9830a65..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/templates/default/dovecot-db.conf.ext.erb
+++ /dev/null
@@ -1,19 +0,0 @@
-# Generated by Chef
-
-# Example DB_CONFIG for Berkeley DB. Typically dict_db_config setting is used
-# to point to this file.
-# http://www.oracle.com/technology/documentation/berkeley-db/db/ref/env/db_config.html
-
-<% if @conf['db'].kind_of?(Hash) -%>
-<% @conf['db'].sort.each do |k, v| -%>
-<%= k %> <%= DovecotCookbook::Conf.value(v) %>
-<% end -%>
-<% else -%>
-# Maximum number of simultaneous transactions.
-set_tx_max 1000
-
-# http://www.oracle.com/technology/documentation/berkeley-db/db/ref/lock/max.html
-#set_lk_max_locks 1000
-#set_lk_max_lockers 1000
-#set_lk_max_objects 1000
-<% end -%>
diff --git a/lc-gdn-chef/cookbooks/dovecot/templates/default/dovecot-dict-auth.conf.ext.erb b/lc-gdn-chef/cookbooks/dovecot/templates/default/dovecot-dict-auth.conf.ext.erb
deleted file mode 100644
index a6ac11c587492cda31fd2d62711a3d81f587ccc0..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/templates/default/dovecot-dict-auth.conf.ext.erb
+++ /dev/null
@@ -1,68 +0,0 @@
-# Generated by Chef
-
-# This file is commonly accessed via passdb {} or userdb {} section in
-# conf.d/auth-dict.conf.ext
-
-# Dictionary URI
-<%= DovecotCookbook::Conf.attribute(@conf['dict_auth'], 'uri') %>
-
-# Key for passdb lookups
-<%= DovecotCookbook::Conf.attribute(@conf['dict_auth'], 'password_key', 'dovecot/passdb/%u') %>
-
-# Key for userdb lookups
-<%= DovecotCookbook::Conf.attribute(@conf['dict_auth'], 'user_key', 'dovecot/userdb/%u') %>
-
-# How to parse the value for key=value pairs. Currently we support only JSON
-# format with { "key": "value", ... } object.
-<%= DovecotCookbook::Conf.attribute(@conf['dict_auth'], 'value_format', 'json') %>
-
-# Default password scheme
-<%= DovecotCookbook::Conf.attribute(@conf['dict_auth'], 'default_pass_scheme', 'MD5') %>
-
-# Username iteration prefix. Keys under this are assumed to contain usernames.
-<%= DovecotCookbook::Conf.attribute(@conf['dict_auth'], 'iterate_prefix', 'userdb/') %>
-
-# Should iteration be disabled for this userdb? If this userdb acts only as a
-# cache there's no reason to try to iterate the (partial & duplicate) users.
-<%= DovecotCookbook::Conf.attribute(@conf['dict_auth'], 'iterate_disable', 'no') %>
-
-# TODO (Chef Cookbook): Support the configuration below.
-
-# The example here shows how to do multiple dict lookups and merge the replies.
-# The "passdb" and "userdb" keys are JSON objects containing key/value pairs,
-# for example: { "uid": 1000, "gid": 1000, "home": "/home/user" }
-
-# key passdb {
-# key = passdb/%u
-# format = json
-# }
-# key userdb {
-# key = userdb/%u
-# format = json
-# }
-# key quota {
-# key = userdb/%u/quota
-# #format = value
-# # The default_value is used if the key isn't found. If default_value setting
-# # isn't specified at all (even as empty), the passdb/userdb lookup fails with
-# # "user doesn't exist".
-# default_value = 100M
-# }
-
-# Space separated list of keys whose values contain key/value paired objects.
-# All the key/value pairs inside the object are added as passdb fields.
-# passdb_objects = passdb
-
-#passdb_fields {
-#}
-
-# Userdb key/value object list.
-# userdb_objects = userdb
-
-# userdb_fields {
-# # dict: refers to key names
-# quota_rule = *:storage=%{dict:quota}
-#
-# # dict:. refers to the objkey inside (JSON) object
-# mail = maildir:%{dict:userdb.home}/Maildir
-# }
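
Nearly every line of this template (and of the LDAP and SQL templates that follow) goes through `DovecotCookbook::Conf.attribute`, whose implementation is not part of this diff. The sketch below is only an assumption about its contract, namely that it emits `key = value` when the attribute is set and a commented-out default otherwise:

```ruby
module DovecotCookbook
  module Conf
    # Assumed rendering of a single scalar value (yes/no for booleans).
    def self.value(v)
      case v
      when true  then 'yes'
      when false then 'no'
      else v.to_s
      end
    end

    # Assumed contract: "key = value" when set, otherwise a commented default.
    def self.attribute(conf, key, default = nil)
      if conf.is_a?(Hash) && !conf[key].nil?
        "#{key} = #{value(conf[key])}"
      elsif default.nil?
        "##{key} ="
      else
        "##{key} = #{value(default)}"
      end
    end
  end
end

conf = { 'uri' => 'redis:host=127.0.0.1:port=6379' }
puts DovecotCookbook::Conf.attribute(conf, 'uri')
puts DovecotCookbook::Conf.attribute(conf, 'default_pass_scheme', 'MD5')
# uri = redis:host=127.0.0.1:port=6379
# #default_pass_scheme = MD5
```
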
diff --git a/lc-gdn-chef/cookbooks/dovecot/templates/default/dovecot-dict-sql.conf.ext.erb b/lc-gdn-chef/cookbooks/dovecot/templates/default/dovecot-dict-sql.conf.ext.erb
deleted file mode 100644
index ad03dbc5ed47cddabc58a169c8a7dbf13fc1ccf2..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/templates/default/dovecot-dict-sql.conf.ext.erb
+++ /dev/null
@@ -1,53 +0,0 @@
-# Generated by Chef
-
-# This file is commonly accessed via dict {} section in dovecot.conf
-
-<% unless @conf['dict_sql']['connect'].nil? -%>
-connect = <%= DovecotCookbook::Conf.value(@conf['dict_sql']['connect']) %>
-<% else -%>
-#connect = host=localhost dbname=mails user=testuser password=pass
-<% end -%>
-
-# CREATE TABLE quota (
-# username varchar(100) not null,
-# bytes bigint not null default 0,
-# messages integer not null default 0,
-# primary key (username)
-# );
-
-<% if @conf['dict_sql']['maps'].kind_of?(Array) -%>
-<% @conf['dict_sql']['maps'].each do |map| -%>
-<%= DovecotCookbook::Conf.map(map) %>
-<% end -%>
-<% else -%>
-map {
- pattern = priv/quota/storage
- table = quota
- username_field = username
- value_field = bytes
-}
-map {
- pattern = priv/quota/messages
- table = quota
- username_field = username
- value_field = messages
-}
-
-# CREATE TABLE expires (
-# username varchar(100) not null,
-# mailbox varchar(255) not null,
-# expire_stamp integer not null,
-# primary key (username, mailbox)
-# );
-
-map {
- pattern = shared/expire/$user/$mailbox
- table = expires
- value_field = expire_stamp
-
- fields {
- username = $user
- mailbox = $mailbox
- }
-}
-<% end -%>
diff --git a/lc-gdn-chef/cookbooks/dovecot/templates/default/dovecot-ldap.conf.ext.erb b/lc-gdn-chef/cookbooks/dovecot/templates/default/dovecot-ldap.conf.ext.erb
deleted file mode 100644
index e979cbaddbcf0a618a02d6cf934a5e5d1a01c212..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/templates/default/dovecot-ldap.conf.ext.erb
+++ /dev/null
@@ -1,145 +0,0 @@
-# Generated by Chef
-
-# This file is commonly accessed via passdb {} or userdb {} section in
-# conf.d/auth-ldap.conf.ext
-
-# This file is opened as root, so it should be owned by root and mode 0600.
-#
-# http://wiki2.dovecot.org/AuthDatabase/LDAP
-#
-# NOTE: If you're not using authentication binds, you'll need to give
-# dovecot-auth read access to userPassword field in the LDAP server.
-# With OpenLDAP this is done by modifying /etc/ldap/slapd.conf. There should
-# already be something like this:
-
-# access to attribute=userPassword
-# by dn="" read # add this
-# by anonymous auth
-# by self write
-# by * none
-
-# Space separated list of LDAP hosts to use. host:port is allowed too.
-<%= DovecotCookbook::Conf.attribute(@conf['ldap'], 'hosts') %>
-
-# LDAP URIs to use. You can use this instead of hosts list. Note that this
-# setting isn't supported by all LDAP libraries.
-<%= DovecotCookbook::Conf.attribute(@conf['ldap'], 'uris') %>
-
-# Distinguished Name - the username used to login to the LDAP server.
-# Leave it commented out to bind anonymously (useful with auth_bind=yes).
-<%= DovecotCookbook::Conf.attribute(@conf['ldap'], 'dn') %>
-
-# Password for LDAP server, if dn is specified.
-<%= DovecotCookbook::Conf.attribute(@conf['ldap'], 'dnpass') %>
-
-# Use SASL binding instead of the simple binding. Note that this changes
-# ldap_version automatically to be 3 if it's lower. Also note that SASL binds
-# and auth_bind=yes don't work together.
-<%= DovecotCookbook::Conf.attribute(@conf['ldap'], 'sasl_bind', false) %>
-# SASL mechanism name to use.
-<%= DovecotCookbook::Conf.attribute(@conf['ldap'], 'sasl_mech') %>
-# SASL realm to use.
-<%= DovecotCookbook::Conf.attribute(@conf['ldap'], 'sasl_realm') %>
-# SASL authorization ID, ie. the dnpass is for this "master user", but the
-# dn is still the logged in user. Normally you want to keep this empty.
-<%= DovecotCookbook::Conf.attribute(@conf['ldap'], 'sasl_authz_id') %>
-
-# Use TLS to connect to the LDAP server.
-<%= DovecotCookbook::Conf.attribute(@conf['ldap'], 'tls', false) %>
-# TLS options, currently supported only with OpenLDAP:
-<%= DovecotCookbook::Conf.attribute(@conf['ldap'], 'tls_ca_cert_file') %>
-<%= DovecotCookbook::Conf.attribute(@conf['ldap'], 'tls_ca_cert_dir') %>
-<%= DovecotCookbook::Conf.attribute(@conf['ldap'], 'tls_cipher_suite') %>
-# TLS cert/key is used only if LDAP server requires a client certificate.
-<%= DovecotCookbook::Conf.attribute(@conf['ldap'], 'tls_cert_file') %>
-<%= DovecotCookbook::Conf.attribute(@conf['ldap'], 'tls_key_file') %>
-# Valid values: never, hard, demand, allow, try
-<%= DovecotCookbook::Conf.attribute(@conf['ldap'], 'tls_require_cert') %>
-
-# Use the given ldaprc path.
-<%= DovecotCookbook::Conf.attribute(@conf['ldap'], 'ldaprc_path') %>
-
-# LDAP library debug level as specified by LDAP_DEBUG_* in ldap_log.h.
-# -1 = everything. You may need to recompile OpenLDAP with debugging enabled
-# to get enough output.
-<%= DovecotCookbook::Conf.attribute(@conf['ldap'], 'debug_level', 0) %>
-
-# Use authentication binding for verifying password's validity. This works by
-# logging into LDAP server using the username and password given by client.
-# The pass_filter is used to find the DN for the user. Note that the pass_attrs
-# is still used, only the password field is ignored in it. Before doing any
-# search, the binding is switched back to the default DN.
-<%= DovecotCookbook::Conf.attribute(@conf['ldap'], 'auth_bind', false) %>
-
-# If authentication binding is used, you can save one LDAP request per login
-# if users' DN can be specified with a common template. The template can use
-# the standard %variables (see user_filter). Note that you can't
-# use any pass_attrs if you use this setting.
-#
-# If you use this setting, it's a good idea to use a different
-# dovecot-ldap.conf.ext for userdb (it can even be a symlink, just as long as
-# the filename is different in userdb's args). That way one connection is used
-# only for LDAP binds and another connection is used for user lookups.
-# Otherwise the binding is changed to the default DN before each user lookup.
-#
-# For example:
-# auth_bind_userdn = cn=%u,ou=people,o=org
-#
-<%= DovecotCookbook::Conf.attribute(@conf['ldap'], 'auth_bind_userdn') %>
-
-# LDAP protocol version to use. Likely 2 or 3.
-<%= DovecotCookbook::Conf.attribute(@conf['ldap'], 'ldap_version', 3) %>
-
-# LDAP base. %variables can be used here.
-# For example: dc=mail, dc=example, dc=org
-<%= DovecotCookbook::Conf.attribute(@conf['ldap'], 'base') %>
-
-# Dereference: never, searching, finding, always
-<%= DovecotCookbook::Conf.attribute(@conf['ldap'], 'deref', 'never') %>
-
-# Search scope: base, onelevel, subtree
-<%= DovecotCookbook::Conf.attribute(@conf['ldap'], 'scope', 'subtree') %>
-
-# User attributes are given in LDAP-name=dovecot-internal-name list. The
-# internal names are:
-# uid - System UID
-# gid - System GID
-# home - Home directory
-# mail - Mail location
-#
-# There are also other special fields which can be returned, see
-# http://wiki2.dovecot.org/UserDatabase/ExtraFields
-<%= DovecotCookbook::Conf.attribute(@conf['ldap'], 'user_attrs', 'homeDirectory=home,uidNumber=uid,gidNumber=gid') %>
-
-# Filter for user lookup. Some variables can be used (see
-# http://wiki2.dovecot.org/Variables for full list):
-# %u - username
-# %n - user part in user@domain, same as %u if there's no domain
- # %d - domain part in user@domain, empty if there's no domain
-<%= DovecotCookbook::Conf.attribute(@conf['ldap'], 'user_filter', '(&(objectClass=posixAccount)(uid=%u))') %>
-
-# Password checking attributes:
-# user: Virtual user name (user@domain), if you wish to change the
-# user-given username to something else
-# password: Password, may optionally start with {type}, eg. {crypt}
-# There are also other special fields which can be returned, see
-# http://wiki2.dovecot.org/PasswordDatabase/ExtraFields
-<%= DovecotCookbook::Conf.attribute(@conf['ldap'], 'pass_attrs', 'uid=user,userPassword=password') %>
-
-# If you wish to avoid two LDAP lookups (passdb + userdb), you can use
-# userdb prefetch instead of userdb ldap in dovecot.conf. In that case you'll
-# also have to include user_attrs in pass_attrs field prefixed with "userdb_"
-# string. For example:
-#pass_attrs = uid=user,userPassword=password,\
-# homeDirectory=userdb_home,uidNumber=userdb_uid,gidNumber=userdb_gid
-
-# Filter for password lookups
-<%= DovecotCookbook::Conf.attribute(@conf['ldap'], 'pass_filter', '(&(objectClass=posixAccount)(uid=%u))') %>
-
-# Attributes and filter to get a list of all users
-<%= DovecotCookbook::Conf.attribute(@conf['ldap'], 'iterate_attrs', 'uid=user') %>
-<%= DovecotCookbook::Conf.attribute(@conf['ldap'], 'iterate_filter', '(objectClass=posixAccount)') %>
-
-# Default password scheme. "{scheme}" before password overrides this.
-# List of supported schemes is in: http://wiki2.dovecot.org/Authentication
-<%= DovecotCookbook::Conf.attribute(@conf['ldap'], 'default_pass_scheme', 'CRYPT') %>
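
For readers unfamiliar with `auth_bind`, the following sketch (built on the third-party net-ldap gem, not on Dovecot's code) shows the idea the comments above describe: resolve the user's DN with a filter like `pass_filter`, then attempt a bind as that DN using the client-supplied password. The host, base DN and service account are invented example values.

```ruby
require 'net/ldap' # third-party net-ldap gem

LDAP_HOST   = 'ldap.example.com'
PEOPLE_BASE = 'ou=people,dc=example,dc=com'

def auth_bind?(username, password)
  # 1. Bind as the service account and resolve the user's DN (pass_filter).
  ldap = Net::LDAP.new(host: LDAP_HOST, port: 389,
                       auth: { method: :simple,
                               username: 'cn=dovecot,dc=example,dc=com',
                               password: 'service-secret' })
  entry = (ldap.search(base: PEOPLE_BASE,
                       filter: Net::LDAP::Filter.eq('uid', username),
                       attributes: ['dn']) || []).first
  return false unless entry

  # 2. Re-bind as that DN with the client-supplied password; success means
  #    the password is valid.
  Net::LDAP.new(host: LDAP_HOST, port: 389,
                auth: { method: :simple,
                        username: entry.dn,
                        password: password }).bind
end
```
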
diff --git a/lc-gdn-chef/cookbooks/dovecot/templates/default/dovecot-sql.conf.ext.erb b/lc-gdn-chef/cookbooks/dovecot/templates/default/dovecot-sql.conf.ext.erb
deleted file mode 100644
index c73b1a8aae115ef3302d097cba653c1b8222de8f..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/templates/default/dovecot-sql.conf.ext.erb
+++ /dev/null
@@ -1,146 +0,0 @@
-# Generated by Chef
-
-# This file is commonly accessed via passdb {} or userdb {} section in
-# conf.d/auth-sql.conf.ext
-
-# This file is opened as root, so it should be owned by root and mode 0600.
-#
-# http://wiki2.dovecot.org/AuthDatabase/SQL
-#
-# For the sql passdb module, you'll need a database with a table that
-# contains fields for at least the username and password. If you want to
-# use the user@domain syntax, you might want to have a separate domain
-# field as well.
-#
-# If your users all have the same uid/gid, and have predictable home
-# directories, you can use the static userdb module to generate the home
-# dir based on the username and domain. In this case, you won't need fields
-# for home, uid, or gid in the database.
-#
-# If you prefer to use the sql userdb module, you'll want to add fields
-# for home, uid, and gid. Here is an example table:
-#
-# CREATE TABLE users (
-# username VARCHAR(128) NOT NULL,
-# domain VARCHAR(128) NOT NULL,
-# password VARCHAR(64) NOT NULL,
-# home VARCHAR(255) NOT NULL,
-# uid INTEGER NOT NULL,
-# gid INTEGER NOT NULL,
-# active CHAR(1) DEFAULT 'Y' NOT NULL
-# );
-
-# Database driver: mysql, pgsql, sqlite
-<%= DovecotCookbook::Conf.attribute(@conf['sql'], 'driver') %>
-
-# Database connection string. This is driver-specific setting.
-#
-# HA / round-robin load-balancing is supported by giving multiple host
-# settings, like: host=sql1.host.org host=sql2.host.org
-#
-# pgsql:
-# For available options, see the PostgreSQL documentation for the
-# PQconnectdb function of libpq.
-# Use maxconns=n (default 5) to change how many connections Dovecot can
-# create to pgsql.
-#
-# mysql:
-# Basic options emulate PostgreSQL option names:
-# host, port, user, password, dbname
-#
-# But also adds some new settings:
-# client_flags - See MySQL manual
-# ssl_ca, ssl_ca_path - Set either one or both to enable SSL
-# ssl_cert, ssl_key - For sending client-side certificates to server
-# ssl_cipher - Set minimum allowed cipher security (default: HIGH)
-# ssl_verify_server_cert - Verify that the name in the server SSL certificate
-# matches the host (default: no)
-# option_file - Read options from the given file instead of
-# the default my.cnf location
-# option_group - Read options from the given group (default: client)
-#
-# You can connect to UNIX sockets by using host: host=/var/run/mysql.sock
-# Note that currently you can't use spaces in parameters.
-#
-# sqlite:
-# The path to the database file.
-#
-# Examples:
-# connect = host=192.168.1.1 dbname=users
-# connect = host=sql.example.com dbname=virtual user=virtual password=blarg
-# connect = /etc/dovecot/authdb.sqlite
-#
-<%= DovecotCookbook::Conf.attribute(@conf['sql'], 'connect') %>
-
-# Default password scheme.
-#
-# List of supported schemes is in
-# http://wiki2.dovecot.org/Authentication/PasswordSchemes
-#
-<%= DovecotCookbook::Conf.attribute(@conf['sql'], 'default_pass_scheme', 'MD5') %>
-
-# passdb query to retrieve the password. It can return fields:
-# password - The user's password. This field must be returned.
-# user - user@domain from the database. Needed with case-insensitive lookups.
-# username and domain - An alternative way to represent the "user" field.
-#
-# The "user" field is often necessary with case-insensitive lookups to avoid
-# e.g. "name" and "nAme" logins creating two different mail directories. If
-# your user and domain names are in separate fields, you can return "username"
-# and "domain" fields instead of "user".
-#
-# The query can also return other fields which have a special meaning, see
-# http://wiki2.dovecot.org/PasswordDatabase/ExtraFields
-#
-# Commonly used available substitutions (see http://wiki2.dovecot.org/Variables
-# for full list):
-# %u = entire user@domain
-# %n = user part of user@domain
-# %d = domain part of user@domain
-#
-# Note that these can be used only as input to SQL query. If the query outputs
-# any of these substitutions, they're not touched. Otherwise it would be
-# difficult to have eg. usernames containing '%' characters.
-#
-# Example:
-# password_query = SELECT userid AS user, pw AS password \
-# FROM users WHERE userid = '%u' AND active = 'Y'
-#
-#password_query = \
-# SELECT username, domain, password \
-# FROM users WHERE username = '%n' AND domain = '%d'
-<%= DovecotCookbook::Conf.attribute(@conf['sql'], 'password_query') %>
-
-# userdb query to retrieve the user information. It can return fields:
-# uid - System UID (overrides mail_uid setting)
-# gid - System GID (overrides mail_gid setting)
-# home - Home directory
-# mail - Mail location (overrides mail_location setting)
-#
-# None of these are strictly required. If you use a single UID and GID, and
-# home or mail directory fits to a template string, you could use userdb static
-# instead. For a list of all fields that can be returned, see
-# http://wiki2.dovecot.org/UserDatabase/ExtraFields
-#
-# Examples:
-# user_query = SELECT home, uid, gid FROM users WHERE userid = '%u'
-# user_query = SELECT dir AS home, user AS uid, group AS gid FROM users where userid = '%u'
-# user_query = SELECT home, 501 AS uid, 501 AS gid FROM users WHERE userid = '%u'
-#
-#user_query = \
-# SELECT home, uid, gid \
-# FROM users WHERE username = '%n' AND domain = '%d'
-<%= DovecotCookbook::Conf.attribute(@conf['sql'], 'user_query') %>
-
-# If you wish to avoid two SQL lookups (passdb + userdb), you can use
-# userdb prefetch instead of userdb sql in dovecot.conf. In that case you'll
-# also have to return userdb fields in password_query prefixed with "userdb_"
-# string. For example:
-#password_query = \
-# SELECT userid AS user, password, \
-# home AS userdb_home, uid AS userdb_uid, gid AS userdb_gid \
-# FROM users WHERE userid = '%u'
-
-# Query to get a list of all usernames.
-#iterate_query = SELECT username AS user FROM users
-<%= DovecotCookbook::Conf.attribute(@conf['sql'], 'iterate_query') %>
diff --git a/lc-gdn-chef/cookbooks/dovecot/templates/default/dovecot.conf.erb b/lc-gdn-chef/cookbooks/dovecot/templates/default/dovecot.conf.erb
deleted file mode 100644
index b7bc8294904a14649fb2554d2a808ac7656717f2..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/templates/default/dovecot.conf.erb
+++ /dev/null
@@ -1,116 +0,0 @@
-# Generated by Chef
-
-## Dovecot configuration file
-
-# If you're in a hurry, see http://wiki2.dovecot.org/QuickConfiguration
-
-# "doveconf -n" command gives a clean output of the changed settings. Use it
-# instead of copy&pasting files when posting to the Dovecot mailing list.
-
-# '#' character and everything after it is treated as comments. Extra spaces
-# and tabs are ignored. If you want to use either of these explicitly, put the
-# value inside quotes, eg.: key = "# char and trailing whitespace "
-
-# Most (but not all) settings can be overridden by different protocols and/or
-# source/destination IPs by placing the settings inside sections, for example:
-# protocol imap { }, local 127.0.0.1 { }, remote 10.0.0.0/8 { }
-
-# Default values are shown for each setting, it's not required to uncomment
-# those. These are exceptions to this though: No sections (e.g. namespace {})
-# or plugin settings are added by default, they're listed only as examples.
-# Paths are also just examples with the real defaults being based on configure
-# options. The paths listed here are for configure --prefix=/usr
-# --sysconfdir=/etc --localstatedir=/var
-
-# Protocols we want to be serving.
-<% unless @protocols.nil? -%>
-protocols = <%= DovecotCookbook::Conf.protocols(@protocols) %>
-<% else -%>
-#protocols = imap pop3 lmtp
-<% end -%>
-
-# A comma separated list of IPs or hosts where to listen in for connections.
-# "*" listens in all IPv4 interfaces, "::" listens in all IPv6 interfaces.
-# If you want to specify non-default ports or anything more complex,
-# edit conf.d/master.conf.
-<%= DovecotCookbook::Conf.attribute(@conf, 'listen', '*, ::') %>
-
-# Base directory where to store runtime data.
-<%= DovecotCookbook::Conf.attribute(@conf, 'base_dir', '/var/run/dovecot/') %>
-
-# Name of this instance. In multi-instance setup doveadm and other commands
-# can use -i <instance_name> to select which instance is used (an alternative
-# to -c <config_path>). The instance name is also added to Dovecot processes
-# in ps output.
-<%= DovecotCookbook::Conf.attribute(@conf, 'instance_name', 'dovecot') %>
-
-# Greeting message for clients.
-<%= DovecotCookbook::Conf.attribute(@conf, 'login_greeting', 'Dovecot ready.') %>
-
-# Space separated list of trusted network ranges. Connections from these
-# IPs are allowed to override their IP addresses and ports (for logging and
-# for authentication checks). disable_plaintext_auth is also ignored for
-# these networks. Typically you'd specify your IMAP proxy servers here.
-<%= DovecotCookbook::Conf.attribute(@conf, 'login_trusted_networks') %>
-
-# Space separated list of login access check sockets (e.g. tcpwrap)
-<%= DovecotCookbook::Conf.attribute(@conf, 'login_access_sockets') %>
-
-# With proxy_maybe=yes if proxy destination matches any of these IPs, don't do
-# proxying. This isn't necessary normally, but may be useful if the destination
-# IP is e.g. a load balancer's IP.
-<%= DovecotCookbook::Conf.attribute(@conf, 'auth_proxy_self') %>
-
-# Show more verbose process titles (in ps). Currently shows user name and
-# IP address. Useful for seeing who are actually using the IMAP processes
-# (eg. shared mailboxes or if same uid is used for multiple accounts).
-<%= DovecotCookbook::Conf.attribute(@conf, 'verbose_proctitle', false) %>
-
-# Should all processes be killed when Dovecot master process shuts down.
-# Setting this to "no" means that Dovecot can be upgraded without
-# forcing existing client connections to close (although that could also be
-# a problem if the upgrade is e.g. because of a security fix).
-<%= DovecotCookbook::Conf.attribute(@conf, 'shutdown_clients', true) %>
-
-# If non-zero, run mail commands via this many connections to doveadm server,
-# instead of running them directly in the same process.
-<%= DovecotCookbook::Conf.attribute(@conf, 'doveadm_worker_count', 0) %>
-# UNIX socket or host:port used for connecting to doveadm server
-<%= DovecotCookbook::Conf.attribute(@conf, 'doveadm_socket_path', 'doveadm-server') %>
-
-# Space separated list of environment variables that are preserved on Dovecot
-# startup and passed down to all of its child processes. You can also give
-# key=value pairs to always set specific settings.
-<%= DovecotCookbook::Conf.attribute(@conf, 'import_environment', 'TZ') %>
-
-##
-## Dictionary server settings
-##
-
-# Dictionary can be used to store key=value lists. This is used by several
-# plugins. The dictionary can be accessed either directly or though a
-# dictionary server. The following dict block maps dictionary names to URIs
-# when the server is used. These can then be referenced using URIs in format
-# "proxy::".
-
-<% if @conf['dict'].kind_of?(Hash) -%>
-dict {
-<% @conf['dict'].sort.each do |key, value| -%>
- <%= key %> = <%= DovecotCookbook::Conf.value(value) %>
-<% end -%>
-}
-<% else -%>
-dict {
- #quota = mysql:/etc/dovecot/dovecot-dict-sql.conf.ext
- #expire = sqlite:/etc/dovecot/dovecot-dict-sql.conf.ext
-}
-<% end -%>
-
-# Most of the actual configuration gets included below. The filenames are
-# first sorted by their ASCII value and parsed in that order. The 00-prefixes
-# in filenames are intended to make it easier to understand the ordering.
-!include conf.d/*.conf
-
-# A config file can also be included without giving an error if
-# it's not found:
-!include_try local.conf
diff --git a/lc-gdn-chef/cookbooks/dovecot/templates/default/ohai7_plugins/dovecot.rb.erb b/lc-gdn-chef/cookbooks/dovecot/templates/default/ohai7_plugins/dovecot.rb.erb
deleted file mode 100644
index 3b0c664d06911c073f6c6a7f6940bc2349e9d609..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/templates/default/ohai7_plugins/dovecot.rb.erb
+++ /dev/null
@@ -1,75 +0,0 @@
-#
-# Author:: Xabier de Zuazo ()
-#
-# Copyright 2014, Onddo Labs, Sl.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-Ohai.plugin(:Dovecot) do
- ENABLE_BUILD_OPTIONS = <%= @enable_build_options.eql?(true).to_s %>
-
- provides 'dovecot', 'dovecot/version'
- provides 'dovecot/build-options' if ENABLE_BUILD_OPTIONS
-
- def init_dovecot
- dovecot Mash.new
- dovecot['version'] = nil
- dovecot['build-options'] = {} if ENABLE_BUILD_OPTIONS
- dovecot
- end
-
- def build_option_key(str)
- str.downcase.tr(' ', '-')
- end
-
- def parse_build_options_hash(build_options)
- Hash[build_options.split(/ +/).map do |value|
- value.index('=').nil? ? [value, true] : value.split('=', 2)
- end]
- end
-
- def parse_build_options_array(build_options)
- build_options.split(/ +/)
- end
-
- def collect_version(stdout)
- dovecot['version'] = stdout.split("\n")[0]
- end
-
- def collect_build_options_line(line)
- case line
- when /^Build options: *(.+)/
- dovecot['build-options']
- .merge!(parse_build_options_hash(Regexp.last_match[1]))
- when /^([^:]+): *(.+)/
- dovecot['build-options'][build_option_key(Regexp.last_match[1])] =
- parse_build_options_array(Regexp.last_match[2])
- end
- end
-
- def collect_build_options(stdout)
- stdout.each_line { |line| collect_build_options_line(line) }
- end
-
- collect_data do
- init_dovecot
- so = shell_out('dovecot --version')
- collect_version(so.stdout) if so.exitstatus == 0
-
- if ENABLE_BUILD_OPTIONS
- so_bo = shell_out('dovecot --build-options')
- collect_build_options(so_bo.stdout) if so_bo.exitstatus == 0
- end
- end
-end
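
The parsing helpers above are easiest to understand from a concrete input. This standalone snippet repeats `parse_build_options_hash` and feeds it a sample `Build options:` payload (the option values are made up):

```ruby
# Same splitting logic as parse_build_options_hash above, runnable on its own:
# bare tokens become boolean flags, key=value tokens become pairs.
def parse_build_options_hash(build_options)
  Hash[build_options.split(/ +/).map do |value|
    value.index('=').nil? ? [value, true] : value.split('=', 2)
  end]
end

line = 'ioloop=epoll notify=inotify openssl io_block_size=8192'
p parse_build_options_hash(line)
# => {"ioloop"=>"epoll", "notify"=>"inotify", "openssl"=>true, "io_block_size"=>"8192"}
```
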
diff --git a/lc-gdn-chef/cookbooks/dovecot/templates/default/ohai_plugins/dovecot.rb.erb b/lc-gdn-chef/cookbooks/dovecot/templates/default/ohai_plugins/dovecot.rb.erb
deleted file mode 100644
index e236934329863413640f93601b6c5b4d2d32cf96..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/templates/default/ohai_plugins/dovecot.rb.erb
+++ /dev/null
@@ -1,64 +0,0 @@
-#
-# Author:: Xabier de Zuazo ()
-#
-# Copyright 2013, Onddo Labs, Sl.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-provides 'dovecot'
-provides 'dovecot/version'
-
-def build_option_key(str)
- str.downcase.tr(' ', '-')
-end
-
-def parse_build_options_hash(build_options)
- Hash[build_options.split(/ +/).map do |value|
- value.index('=').nil? ? [value, true] : value.split('=', 2)
- end]
-end
-
-def parse_build_options_array(build_options)
- build_options.split(/ +/)
-end
-
-dovecot Mash.new unless dovecot
-dovecot['version'] = nil unless dovecot['version']
-
-status, stdout, _stderr =
- run_command(no_status_check: true, command: 'dovecot --version')
-dovecot['version'] = stdout.split("\n")[0] if status == 0
-
-<% if @enable_build_options -%>
-
-provides 'dovecot/build-options'
-
-dovecot['build-options'] = {} unless dovecot['build-options']
-
-status, stdout, _stderr =
- run_command(no_status_check: true, command: 'dovecot --build-options')
-if status == 0
- stdout.split("\n").each do |line|
- case line
- when /^Build options: *(.+)/
- dovecot['build-options']
- .merge!(parse_build_options_hash(Regexp.last_match[1]))
- when /^([^:]+): *(.+)/
- dovecot['build-options'][build_option_key(Regexp.last_match[1])] =
- parse_build_options_array(Regexp.last_match[2])
- end
- end
-end
-
-<% end -%>
diff --git a/lc-gdn-chef/cookbooks/dovecot/templates/default/password.erb b/lc-gdn-chef/cookbooks/dovecot/templates/default/password.erb
deleted file mode 100644
index 5cf5b7829e117aee4bf83ad330eef0ef5318ce56..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/dovecot/templates/default/password.erb
+++ /dev/null
@@ -1,3 +0,0 @@
-<% @credentials.each do |user, password, uid, gid, gecos, homedir, shell, extra_fields| -%>
-<%= user %>:<%= password %>:<%= uid %>:<%= gid %>::<%= homedir %>::<%= extra_fields %>
-<% end -%>
diff --git a/lc-gdn-chef/cookbooks/firewall/CHANGELOG.md b/lc-gdn-chef/cookbooks/firewall/CHANGELOG.md
deleted file mode 100644
index f632cd24bc97596a8db776accaf6e7af66b1b002..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/firewall/CHANGELOG.md
+++ /dev/null
@@ -1,409 +0,0 @@
-# firewall Cookbook CHANGELOG
-
-This file is used to list changes made in each version of the firewall cookbook.
-
-## 6.0.0 - *2022-05-09*
-
-- Values for firewalld resources must be specified as one would specify them to `firewall-cmd`.
-- Do not use begin/rescue blocks when adding firewalld-objects, as that resulted in errors being logged by firewalld.
-- Various bug fixes that were found along the way.
-
-## 5.1.0 - *2022-05-07*
-
-- Add new providers for firewalld using the dbus-interface of firewalld.
-
-## 5.0.0 - *2022-04-20*
-
-- Add support for nftables
-
-## 4.0.3 - *2022-04-11*
-
-- Use resuable workflows instead of Chef Delivery
-
-## 4.0.2 - *2022-02-17*
-
-- Standardise files with files in sous-chefs/repo-management
-- Remove delivery folder
-
-## 4.0.1 - *2022-01-07*
-
-- Remove extraneous task file that's no longer needed
-
-## 4.0.0 - *2021-09-09*
-
-- Remove dependency on chef-sugar cookbook
-- Bump to require Chef Infra Client >= 15.5 for chef-utils
-- Update metadata and README to Sous Chefs
-
-## 3.0.2 - *2021-08-30*
-
-- Standardise files with files in sous-chefs/repo-management
-
-## 3.0.1 - *2021-07-08*
-
-- Restart netfilter service in iptables mode after updating firewall rules
-
-## 3.0.0 - *2021-06-14*
-
-- Add Amazon Linux support
-- Fix firewall resource actions list
-- First attempt to modernize testing
-- Various Cookstyle fixes
-
-## 2.7.1 - *2021-06-01*
-
-- resolved cookstyle error: libraries/helpers_windows.rb:47:9 convention: `Style/RedundantAssignment`
-- resolved cookstyle error: libraries/helpers_windows.rb:48:9 convention: `Layout/IndentationWidth`
-- resolved cookstyle error: libraries/helpers_windows.rb:49:16 convention: `Layout/ElseAlignment`
-- resolved cookstyle error: libraries/helpers_windows.rb:50:9 convention: `Layout/IndentationWidth`
-- resolved cookstyle error: libraries/helpers_windows.rb:51:16 warning: `Layout/EndAlignment`
-- resolved cookstyle error: libraries/helpers_windows.rb:52:1 convention: `Layout/EmptyLinesAroundMethodBody`
-- resolved cookstyle error: libraries/helpers_windows.rb:52:1 convention: `Layout/TrailingWhitespace`
-- resolved cookstyle error: libraries/provider_firewall_firewalld.rb:30:5
- refactor: `ChefModernize/ActionMethodInResource`
-- resolved cookstyle error: libraries/provider_firewall_firewalld.rb:54:5
- refactor: `ChefModernize/ActionMethodInResource`
-- resolved cookstyle error: libraries/provider_firewall_firewalld.rb:114:5
- refactor: `ChefModernize/ActionMethodInResource`
-- resolved cookstyle error: libraries/provider_firewall_firewalld.rb:136:5
- refactor: `ChefModernize/ActionMethodInResource`
-- resolved cookstyle error: libraries/provider_firewall_firewalld.rb:149:5
- refactor: `ChefModernize/ActionMethodInResource`
-- resolved cookstyle error: libraries/provider_firewall_iptables.rb:33:5
- refactor: `ChefModernize/ActionMethodInResource`
-- resolved cookstyle error: libraries/provider_firewall_iptables.rb:63:5
- refactor: `ChefModernize/ActionMethodInResource`
-- resolved cookstyle error: libraries/provider_firewall_iptables.rb:112:5
- refactor: `ChefModernize/ActionMethodInResource`
-- resolved cookstyle error: libraries/provider_firewall_iptables.rb:134:5
- refactor: `ChefModernize/ActionMethodInResource`
-- resolved cookstyle error: libraries/provider_firewall_iptables_ubuntu.rb:34:5
- refactor: `ChefModernize/ActionMethodInResource`
-- resolved cookstyle error: libraries/provider_firewall_iptables_ubuntu.rb:67:5
- refactor: `ChefModernize/ActionMethodInResource`
-- resolved cookstyle error: libraries/provider_firewall_iptables_ubuntu.rb:133:5
- refactor: `ChefModernize/ActionMethodInResource`
-- resolved cookstyle error: libraries/provider_firewall_iptables_ubuntu.rb:156:5
- refactor: `ChefModernize/ActionMethodInResource`
-- resolved cookstyle error: libraries/provider_firewall_iptables_ubuntu1404.rb:34:5
- refactor: `ChefModernize/ActionMethodInResource`
-- resolved cookstyle error: libraries/provider_firewall_iptables_ubuntu1404.rb:67:5
- refactor: `ChefModernize/ActionMethodInResource`
-- resolved cookstyle error: libraries/provider_firewall_iptables_ubuntu1404.rb:133:5
- refactor: `ChefModernize/ActionMethodInResource`
-- resolved cookstyle error: libraries/provider_firewall_iptables_ubuntu1404.rb:156:5
- refactor: `ChefModernize/ActionMethodInResource`
-- resolved cookstyle error: libraries/provider_firewall_rule.rb:24:5 refactor: `ChefModernize/ActionMethodInResource`
-- resolved cookstyle error: libraries/provider_firewall_ufw.rb:32:5 refactor: `ChefModernize/ActionMethodInResource`
-- resolved cookstyle error: libraries/provider_firewall_ufw.rb:61:5 refactor: `ChefModernize/ActionMethodInResource`
-- resolved cookstyle error: libraries/provider_firewall_ufw.rb:102:5 refactor: `ChefModernize/ActionMethodInResource`
-- resolved cookstyle error: libraries/provider_firewall_ufw.rb:115:5 refactor: `ChefModernize/ActionMethodInResource`
-- resolved cookstyle error: libraries/provider_firewall_windows.rb:29:5 refactor: `ChefModernize/ActionMethodInResource`
-- resolved cookstyle error: libraries/provider_firewall_windows.rb:42:5 refactor: `ChefModernize/ActionMethodInResource`
-- resolved cookstyle error: libraries/provider_firewall_windows.rb:97:5 refactor: `ChefModernize/ActionMethodInResource`
-- resolved cookstyle error: libraries/provider_firewall_windows.rb:118:5
- refactor: `ChefModernize/ActionMethodInResource`
-- resolved cookstyle error: attributes/iptables.rb:8:54 refactor: `ChefStyle/AttributeKeys`
-- resolved cookstyle error: attributes/iptables.rb:8:54 convention: `Style/StringLiteralsInInterpolation`
-- resolved cookstyle error: attributes/iptables.rb:8:63 refactor: `ChefStyle/AttributeKeys`
-- resolved cookstyle error: attributes/iptables.rb:8:64 convention: `Style/StringLiteralsInInterpolation`
-- resolved cookstyle error: attributes/iptables.rb:9:56 refactor: `ChefStyle/AttributeKeys`
-- resolved cookstyle error: attributes/iptables.rb:9:56 convention: `Style/StringLiteralsInInterpolation`
-- resolved cookstyle error: attributes/iptables.rb:9:65 refactor: `ChefStyle/AttributeKeys`
-- resolved cookstyle error: attributes/iptables.rb:9:66 convention: `Style/StringLiteralsInInterpolation`
-- resolved cookstyle error: attributes/iptables.rb:10:55 refactor: `ChefStyle/AttributeKeys`
-- resolved cookstyle error: attributes/iptables.rb:10:55 convention: `Style/StringLiteralsInInterpolation`
-- resolved cookstyle error: attributes/iptables.rb:10:64 refactor: `ChefStyle/AttributeKeys`
-- resolved cookstyle error: attributes/iptables.rb:10:65 convention: `Style/StringLiteralsInInterpolation`
-
-## 2.7.0 (2018-12-19)
-
-- Nominal support for Debian 9 (#202)
-
-## 2.6.5 (2018-07-24)
-
-- use platform_family instead of platform to include all rhels
-
-## v2.6.4 (2018-07-01)
-
-- Stop including chef-sugar when it's >= 4.0.0 (#197)
-
-## v2.6.3 (2018-02-01)
-
-- Fix issue with deep merging of hashes and arrays in recent chef release (#185)
-
-## v2.6.2 (2017-06-01)
-
-- Incorrect file checking on Ubuntu, double file write (#173)
-- Added testing on CentOS 6.9
-- Clarify metadata that we're not working on Amazon Linux (#172)
-
-## v2.6.1 (2017-04-21)
-
-- Add recipe to disable firewall (#164)
-
-## v2.6.0 (2017-04-17)
-
-- Initial Chef 13.x support (#160, #159)
-- Allow loopback and icmp, when enabled (#161)
-- Address various newer rubocop and foodcritic complaints
-- Convert rule provider away from DSL (#159)
-
-## v2.5.4 (2017-02-13)
-
-- Update Test Kitchen platforms to the latest
-- Update copyright headers
-- Allow package options to be passed through to the package install for firewall
-- Define policy for Windows Firewall and use the attributes to set desired policy
-
-## v2.5.3 (2016-10-26)
-
-- Don't show firewall resource as updated (#133)
-- Add :off as a valid logging level (#129)
-- Add support for Ubuntu 16.04 (#149)
-
-## v2.5.2 (2016-06-02)
-
-- Don't issue commands when firewalld isn't active (#140)
-- Install iptables-services on CentOS >= 7 (#131)
-- Update Ruby version on Travis for listen gem
-
-## v2.5.1 (2016-05-31)
-
-- Protocol guard incorrectly prevents "none" protocol type on UFW helper (#128)
-- Fix wrongly ordered conditional for converting ports to strings using port_to_s
-- Fix notify_firewall attribute crashing firewall_rule provider (#130)
-- Add warning if firewall rule opens all traffic (#132)
-- Add ipv6 attribute respect to Ubuntu iptables (#138)
-
-## v2.5.0 (2016-03-08)
-
-- Don't modify parameter for port (#120)
-- Remove a reference to the wrong variable name under windows (#123)
-- Add support for mobile shell default firewall rule (#121)
-- New rubocop rules and style fixes
-- Correct a README.md example for `action :allow`
-
-## v2.4.0 (2016-01-28)
-
-- Expose default iptables ruleset so that raw rules can be used in conjunction with rulesets for other tables (#101).
-
-## v2.3.1 (2016-01-08)
-
-- Add raw rule support to the ufw firewall provider (#113).
-
-## v2.3.0 (2015-12-23)
-
-- Refactor logic so that firewall rules don't add a string rule to the firewall when their actions run. Just run the
- action once on the firewall itself. This is designed to prevent partial application of rules (#106)
-
-- Switch to "enabled" (positive logic) instead of "disabled" (negative logic) on the firewall resource. It was difficult
- to reason with "disabled false" for some complicated recipes using firewall downstream. `disabled` is now deprecated.
-
-- Add proper Windows testing and serverspec tests back into this cookbook.
-
-- Fix the `port_to_s` function so it also works for Windows (#111)
-
-- Fix typo checking action instead of command in iptables helper (#112)
-
-- Remove testing ranges of ports on CentOS 5.x, as it's broken there.
-
-## v2.2.0 (2015-11-02)
-
-Added permanent as default option for RHEL 7 based systems using firewall-cmd. This defaults to turned off, but it will
-be enabled by default on the next major version bump.
-
-## v2.1.0 (2015-10-15)
-
-Minor feature release.
-
-- Ensure ICMPv6 is open when `['firewall']['allow_established']` is set to true (the default). ICMPv6 is critical for
- most IPv6 operations.
-
-## v2.0.5 (2015-10-05)
-
-Minor bugfix release.
-
-- Ensure provider filtering always yields 1 and only 1 provider, #97 & #98
-- Documentation update #96
-
-## v2.0.4 (2015-09-23)
-
-Minor bugfix release.
-
-- Allow override of filter chain policies, #94
-- Fix foodcritic and chefspec errors
-
-## v2.0.3 (2015-09-14)
-
-Minor bugfix release.
-
-- Fix wrong conditional for firewalld ports, #93
-- Fix ipv6 command logic under iptables, #91
-
-## v2.0.2 (2015-09-08)
-
-- Release with working CI, Chefspec matchers.
-
-## v2.0.1 (2015-09-01)
-
-- Add default related/established rule for iptables
-
-## v2.0.0 (2015-08-31)
-
-- 84, major rewrite
- - Allow relative positioning of rules
- - Use delayed notifications to create one firewall ruleset instead of incremental changes
- - Remove poise dependency
-- #82 - Introduce Windows firewall support and test-kitchen platform
-- #73 - Add the option to disable ipv6 commands on iptables
-- #78 - Use Chef-12 style `provides` to address provider mapping issues
-- Rubocop and foodcritic cleanup
-
-## v1.6.1 (2015-07-24)
-
-- 80 - Remove an extra space in port range
-
-## v1.6.0 (2015-07-15)
-
-- 68 - Install firewalld when it does not exist
-- 72 - Fix symbol that was a string, breaking comparisons
-
-## v1.5.2 (2015-07-15)
-
-- 75 - Use correct service in iptables save action, Add serverspec tests for iptables suite
-
-## v1.5.1 (2015-07-13)
-
-- 74 - add :save matcher for Chefspec
-
-## v1.5.0 (2015-07-06)
-
-- 70 - Add chef service resource to ensure firewall-related services are enabled/disabled
- - Add testing and support for iptables on ubuntu in iptables provider
-
-## v1.4.0 (2015-06-30)
-
-- 69 - Support for CentOS/RHEL 5.x
-
-## v1.3.0 (2015-06-09)
-
-- 63 - Add support for protocol numbers
-
-## v1.2.0 (2015-05-28)
-
-- 64 - Support the newer version of poise
-
-## v1.1.2 (2015-05-19)
-
-- 60 - Always add /32 or /128 to ipv4 or ipv6 addresses, respectively - Make comment quoting optional; iptables on
- Ubuntu strips quotes on strings without any spaces
-
-## v1.1.1 (2015-05-11)
-
-- 57 - Suppress warning: already initialized constant XXX while Chefspec
-
-## v1.1.0 (2015-04-27)
-
-- 56 - Better ipv6 support for firewalld and iptables
-- 54 - Document raw parameter
-
-## v1.0.2 (2015-04-03)
-
-- 52 - Typo in :masquerade action name
-
-## v1.0.1 (2015-03-28)
-
-- 49 - Fix position attribute of firewall_rule providers to be correctly used as a string in commands
-
-## v1.0.0 (2015-03-25)
-
-- Major upgrade and rewrite as HWRP using poise
-- Adds support for iptables and firewalld
-- Modernize tests and other files
-- Fix many bugs from ufw defaults to multiport support
-
-## v0.11.8 (2014-05-20)
-
-- Corrects issue where on a secondary converge would not distinguish between inbound and outbound rules
-
-## v0.11.6 (2014-02-28)
-
-[COOK-4385] - UFW provider is broken
-
-## v0.11.4 (2014-02-25)
-
-[COOK-4140] Only notify when a rule is actually added
-
-## v0.11.2
-
-### Bug
-
-- **[COOK-3615](https://tickets.opscode.com/browse/COOK-3615)** - Install required UFW package on Debian
-
-## v0.11.0
-
-### Improvement
-
-- [COOK-2932]: ufw providers work on debian but cannot be used
-
-## v0.10.2
-
-- [COOK-2250] - improve readme
-
-## v0.10.0
-
-- [COOK-1234] - allow multiple ports per rule
-
-## v0.9.2
-
-- [COOK-1615] - Firewall example docs have incorrect direction syntax
-
-## v0.9.0
-
-The default action for firewall LWRP is now :enable, the default action for firewall_rule LWRP is now :reject. This is
-in line with a "default deny" policy.
-
-- [COOK-1429] - resolve foodcritic warnings
-
-## v0.8.0
-
-- refactor all resources and providers into LWRPs
-- removed :reset action from firewall resource (couldn't find a good way to make it idempotent)
-- removed :logging action from firewall resource...just set desired level via the log_level attribute
-
-## v0.6.0
-
-- [COOK-725] Firewall cookbook firewall_rule LWRP needs to support logging attribute.
-- Firewall cookbook firewall LWRP needs to support :logging
-
-## v0.5.7
-
-- [COOK-696] Firewall cookbook firewall_rule LWRP needs to support interface
-- [COOK-697] Firewall cookbook firewall_rule LWRP needs to support the direction for the rules
-
-## v0.5.6
-
-- [COOK-695] Firewall cookbook firewall_rule LWRP needs to support destination port
-
-## v0.5.5
-
-- [COOK-709] fixed :nothing action for the 'firewall_rule' resource.
-
-## v0.5.4
-
-- [COOK-694] added :reject action to the 'firewall_rule' resource.
-
-## v0.5.3
-
-- [COOK-698] added :reset action to the 'firewall' resource.
-
-## v0.5.2
-
-- Add missing 'requires' statements. fixes 'NameError: uninitialized constant' error. Thanks to Ernad Husremović for the
- fix.
-
-## v0.5.0
-
-- [COOK-686] create firewall and firewall_rule resources
-- [COOK-687] create UFW providers for all resources
diff --git a/lc-gdn-chef/cookbooks/firewall/LICENSE b/lc-gdn-chef/cookbooks/firewall/LICENSE
deleted file mode 100644
index 8f71f43fee3f78649d238238cbde51e6d7055c82..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/firewall/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "{}"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright {yyyy} {name of copyright owner}
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-
diff --git a/lc-gdn-chef/cookbooks/firewall/README.md b/lc-gdn-chef/cookbooks/firewall/README.md
deleted file mode 100644
index a7830a091b7e34c305444ebce4850d01bbd4036e..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/firewall/README.md
+++ /dev/null
@@ -1,398 +0,0 @@
-# firewall Cookbook
-
-[](https://supermarket.chef.io/cookbooks/firewall)
-[](https://github.com/sous-chefs/firewall/actions?query=workflow%3Aci)
-[](#backers)
-[](#sponsors)
-[](https://opensource.org/licenses/Apache-2.0)
-
-Provides a set of primitives for managing firewalls and associated rules.
-
-PLEASE NOTE - The resources/providers in this cookbook are under heavy development. An attempt is being made to keep the
-resource simple/stupid by starting with less sophisticated firewall implementations first and refactoring/vetting the
-resource definition with each successive provider.
-
-## Maintainers
-
-This cookbook is maintained by the Sous Chefs. The Sous Chefs are a community of Chef cookbook maintainers working
-together to maintain important cookbooks. If you’d like to know more please
-visit [sous-chefs.org](https://sous-chefs.org/) or come chat with us on the Chef Community Slack
-in [#sous-chefs](https://chefcommunity.slack.com/messages/C2V7B88SF).
-
-## Requirements
-
-- Chef Infra Client 15.5+
-
-```
-depends 'firewall'
-```
-
-### Supported firewalls and platforms
-
-- UFW - Ubuntu, Debian (except 9)
-- IPTables - Red Hat & CentOS, Ubuntu
-- FirewallD - Red Hat & CentOS >= 7.0 (IPv4-only
-  support, [needs contributions/testing](https://github.com/chef-cookbooks/firewall/issues/86))
-- Windows Advanced Firewall - 2012 R2
-- nftables
-
-Tested on:
-
-- Ubuntu 16.04 with iptables, ufw
-- Debian 9 with iptables
-- Debian 11 with nftables
-- Debian 11 with new resources for firewalld
-- CentOS 6 with iptables
-- CentOS 7.1 with firewalld
-- Windows Server 2012r2 with Windows Advanced Firewall
-
-By default, Ubuntu chooses ufw. To switch to iptables, set this in an attribute file:
-
-```
-default['firewall']['ubuntu_iptables'] = true
-```
-
-By default, Red Hat & CentOS >= 7.0 chooses firewalld. To switch to iptables, set this in an attribute file:
-
-```
-default['firewall']['redhat7_iptables'] = true
-```
-
-To use nftables, use the `nftables` and `nftables_rule` resources. These resources are written in a more modern style
-and are not configurable by node attributes.
-
-## Considerations that apply to all firewall providers and resources
-
-This cookbook comes with two resources, `firewall` and `firewall_rule`. The typical usage scenario is as follows:
-
-- run the `:install` action on the `firewall` resource named 'default', which installs appropriate packages and
- configures services to start on boot and starts them
-- run the `:create` action on every `firewall_rule` resource, which adds to the list of rules that should be configured
- on the firewall. `firewall_rule` then automatically sends a delayed notification to the `firewall['default']` resource
- to run the `:restart` action.
-- run the delayed notification with action `:restart` on the `firewall` resource. If any rules are different from the
-  last run, the provider will update the current state of the firewall rules to match the expected rules.
-
-There is a fundamental mismatch between the idea of a chef action and the action that should be taken on a firewall
-rule. For this reason, the chef action for a firewall_rule may be `:nothing` (the rule should not be present in the
-firewall) or `:create` (the rule should be present in the firewall), but the action taken on a packet in a
-firewall (`DROP`, `ACCEPT`, etc) is denoted as a `command` parameter on the `firewall_rule` resource.
-
-The same points hold for the `nftables` and `nftables_rule` resources.
-
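-For illustration, here is a minimal sketch of that flow (the same resource names used in the examples further down; the
-delayed notification is wired automatically, so no explicit `notifies` is needed):
-
-```ruby
-# install the firewall once, then declare rules; each rule is recorded and the
-# firewall ruleset is rebuilt via the automatic delayed notification
-firewall 'default' do
-  action :install
-end
-
-firewall_rule 'ssh' do
-  port 22
-  command :allow
-end
-```
-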
-## iptables considerations
-
-If you need to use a table other than `*filter`, the best way to do so is like so:
-
-```
-node.default['firewall']['iptables']['defaults'][:ruleset] = {
- '*filter' => 1,
- ':INPUT DROP' => 2,
- ':FORWARD DROP' => 3,
- ':OUTPUT ACCEPT_FILTER' => 4,
- 'COMMIT_FILTER' => 100,
- '*nat' => 101,
- ':PREROUTING DROP' => 102,
- ':POSTROUTING DROP' => 103,
- ':OUTPUT ACCEPT_NAT' => 104,
- 'COMMIT_NAT' => 200
-}
-```
-
-Note -- in order to support multiple hash keys containing the same rule, anything found after the underscore will be
-stripped for: `:OUTPUT :INPUT :POSTROUTING :PREROUTING COMMIT`. This allows an example like the above to be reduced to
-just repeated lines of `COMMIT` and `:OUTPUT ACCEPT` while still avoiding duplication of other things.
-
-Then it's trivial to add additional rules to the `*nat` table using the raw parameter:
-
-```
-firewall_rule "postroute" do
- raw "-A POSTROUTING -o eth1 -p tcp -d 172.28.128.21 -j SNAT --to-source 172.28.128.6"
- position 150
-end
-```
-
-Note that any line starting with `COMMIT` will become just `COMMIT`, as hash keys must be unique but we need multiple
-commit lines.
-
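-As a rough sketch of that normalization (it mirrors the `repeatable_directives` handling in `libraries/helpers.rb`,
-which keeps only the text before the first underscore for those directives):
-
-```ruby
-['COMMIT_FILTER', 'COMMIT_NAT', ':OUTPUT ACCEPT_FILTER'].map { |k| k[/[^_]+/] }
-#=> ["COMMIT", "COMMIT", ":OUTPUT ACCEPT"]
-```
-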
-## nftables
-
-Please read the documentation for the
-[`nftables` resource](documentation/resource_nftables.md) and the
-[`nftables_rule` resource](documentation/resource_nftables_rule.md)
-
-## Recipes
-
-### default
-
-The default recipe creates a firewall resource with action install.
-
-### disable_firewall
-
-Used to disable platform specific firewall. Many clouds have their own firewall configured outside of the OS instance
-such as AWS Security Groups.
-
-## Attributes
-
-- `default['firewall']['allow_ssh'] = false`, set true to open port 22 for SSH when the default recipe runs
-- `default['firewall']['allow_mosh'] = false`, set to true to open UDP ports 60000 - 61000 for [Mosh][0] when the
- default recipe runs
-- `default['firewall']['allow_winrm'] = false`, set true to open port 5989 for WinRM when the default recipe runs
-- `default['firewall']['allow_loopback'] = false`, set to true to allow all traffic on the loopback interface
-- `default['firewall']['allow_icmp'] = false`, set true to allow icmp protocol on supported OSes (note: ufw and windows
- implementations don't support this)
-- `default['firewall']['ubuntu_iptables'] = false`, set to true to use iptables on Ubuntu / Debian when using the
- default recipe
-- `default['firewall']['redhat7_iptables'] = false`, set to true to use iptables on Red Hat / CentOS 7 when using the
- default recipe
-- `default['firewall']['ufw']['defaults']` hash for template `/etc/default/ufw`
-- `default['firewall']['iptables']['defaults']` hash for default policies for 'filter' table's chains
-- `default['firewall']['windows']['defaults']` hash to define inbound / outbound firewall policy on Windows platform
-- `default['firewall']['allow_established'] = true`, set to false if you don't want a related/established default rule
- on iptables
-- `default['firewall']['ipv6_enabled'] = true`, set to false if you don't want IPv6 related/established default rule on
- iptables (this enables ICMPv6, which is required for much of IPv6 communication)
-- `default['firewall']['firewalld']['permanent'] = false`, set to true if you want firewalld rules to be added
- with `--permanent` so they survive a reboot. This will be changed to `true` by default in a future major version
- release.
-
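-For example, a wrapper cookbook might flip a few of these in its own attributes file (a minimal sketch; the attribute
-names are the ones listed above):
-
-```ruby
-# attributes/default.rb of a hypothetical wrapper cookbook
-default['firewall']['allow_ssh'] = true
-default['firewall']['allow_loopback'] = true
-default['firewall']['firewalld']['permanent'] = true
-```
-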
-## Resources
-
-There is a separate folder for [`firewalld` resources](documentation/README.md).
-
-### firewall
-
-***NB***: The name 'default' of this resource is important as it is used for firewall_rule providers to locate the
-firewall resource. If you change it, you must also supply the same value to any firewall_rule resources using
-the `firewall_name` parameter.
-
-#### Actions
-
-- `:install` (*default action*): Install and Enable the firewall. This will ensure the appropriate packages are
- installed and that any services have been started.
-- `:disable`: Disable the firewall. Drop any rules and put the node in an unprotected state. Flush all current rules.
- Also erase any internal state used to detect when rules should be applied.
-- `:flush`: Flush all current rules. Also erase any internal state used to detect when rules should be applied.
-- `:save`: Ensure all rules are added permanently under firewalld using `--permanent`. Not supported on ufw, iptables.
- You must notify this action at the end of the chef run if you want permanent firewalld rules (they are not persistent
- by default).
-
-#### Parameters
-
-- `disabled` (default to `false`): If set to true, all actions will no-op on this resource. This is a way to prevent
- included cookbooks from configuring a firewall.
-- `ipv6_enabled` (default to `true`): If set to false, firewall will not perform any ipv6 related work. Currently only
- supported in iptables.
-- `log_level`: UFW only. Level of verbosity the firewall should log at. valid values are: `:low`, `:medium`, `:high`,
-  `:full`, `:off`. default is `:low`.
-- `rules`: This is used internally for firewall_rule resources to append their rules. You should NOT touch this value
- unless you plan to supply an entire firewall ruleset at once, and skip using firewall_rule resources.
-- `disabled_zone` (firewalld only): The zone to set on firewalld when the firewall should be disabled. Can be any string
-  in symbol form, e.g. :public, :drop, etc. Defaults to `:public`.
-- `enabled_zone` (firewalld only): The zone to set on firewalld when the firewall should be enabled. Can be any string
-  in symbol form, e.g. :public, :drop, etc. Defaults to `:drop`.
-- `package_options`: Used to pass options to the package install of firewall
-
-```ruby
-# all defaults
-firewall 'default'
-
-# enable platform default firewall
-firewall 'default' do
- action :install
-end
-
-# increase logging past default of 'low'
-firewall 'default' do
- log_level :high
- action :install
-end
-```
-
-### firewall_rule
-
-#### Actions
-
-- `:create` (_default action_): If a firewall_rule runs this action, the rule will be recorded in a chef resource's
-  internal state and applied when the provider notifies the firewall resource with action `:reload`; this notification
-  happens automatically.
-
-#### Parameters
-
-- `firewall_name`: the matching firewall resource that this rule applies to. Default value: `default`
-- `raw`: Used to pass an entire rule as a string, omitting all other parameters. This line will be directly loaded
- by `iptables-restore`, fed directly into `ufw` on the command line, or run using `firewall-cmd`.
-- `description` (_default: same as rule name_): Used to provide a comment that will be included when adding the firewall
- rule.
-- `include_comment` (_default: true_): Used to optionally exclude the comment in the rule.
-- `position` (_default: 50_): **relative** position to insert rule at. Position may be any integer n where 0 < n < 100
-  (exclusive), and more than one rule may specify the same position.
-- `command`: What action to take on a particular packet
- - `:allow` (_default action_): the rule should allow matching packets
- - `:deny`: the rule should deny matching packets
- - `:reject`: the rule should reject matching packets
-  - `:masquerade`: Masquerade the matching packets
- - `:redirect`: Redirect the matching packets
- - `:log`: Configure logging
-- `stateful`: a symbol or array of symbols, such as `[:related, :established]`, that will be passed to the state module
-  in iptables or firewalld.
-- `protocol`: `:tcp` (_default_), `:udp`, `:icmp`, `:none` or protocol number. Using protocol numbers is not supported
- using the ufw provider (default for debian/ubuntu systems).
-- `direction`: For ufw, direction of the rule. valid values are: `:in` (_default_), `:out`, `:pre`, `:post`.
-- `source` (_Default is `0.0.0.0/0` or `Anywhere`_): source ip address or subnet to filter.
-- `source_port` (_Default is nil_): source port for filtering packets.
-- `destination`: ip address or subnet to filter on packet destination, must be a valid IP
-- `port` or `dest_port`: target port number (ie. 22 to allow inbound SSH), an array of incoming port numbers
-  (ie. [80,443] to allow inbound HTTP & HTTPS), or a range of incoming port numbers (ie. 60000..61000 to allow inbound
-  mobile-shell). NOTE: the `protocol` attribute is required when specifying multiple ports or a range of ports.
-- `interface`: (source) interface to apply rule (ie. `eth0`).
-- `dest_interface`: interface where packets may be destined to go
-- `redirect_port`: redirected port for rules with command `:redirect`
-- `logging`: may be added to enable logging for a particular rule. valid values are: `:connections`, `:packets`. In the
- ufw provider, `:connections` logs new connections while `:packets` logs all packets.
-
-```ruby
-# open standard ssh port
-firewall_rule 'ssh' do
- port 22
- command :allow
-end
-
-# open standard http port to tcp traffic only; insert as first rule
-firewall_rule 'http' do
- port 80
- protocol :tcp
- position 1
- command :allow
-end
-
-# restrict port 13579 to 10.0.111.0/24 on eth0
-firewall_rule 'myapplication' do
- port 13579
- source '10.0.111.0/24'
- direction :in
- interface 'eth0'
- command :allow
-end
-
-# specify a protocol number (supported on centos/redhat)
-firewall_rule 'vrrp' do
- protocol 112
- command :allow
-end
-
-# use the iptables provider to specify protocol number on debian/ubuntu
-firewall_rule 'vrrp' do
- provider Chef::Provider::FirewallRuleIptables
- protocol 112
- command :allow
-end
-
-# can use :raw command with UFW provider for VRRP
-firewall_rule "VRRP" do
- command :allow
- raw "allow to 224.0.0.18"
-end
-
-# open UDP ports 60000..61000 for mobile shell (mosh.mit.edu), note
-# that the protocol attribute is required when using port_range
-firewall_rule 'mosh' do
- protocol :udp
- port 60000..61000
- command :allow
-end
-
-# open multiple ports for http/https, note that the protocol
-# attribute is required when using ports
-firewall_rule 'http/https' do
- protocol :tcp
- port [80, 443]
- command :allow
-end
-
-firewall 'default' do
- enabled false
- action :nothing
-end
-```
-
-#### Providers
-
-- See `libraries/z_provider_mapping.rb` for a full list of providers for each platform and version.
-
-Different providers will determine the current state of the rules differently -- parsing the output of a command,
-maintaining the state in a file, or some other way. If the firewall is adjusted from outside of chef (non-idempotent),
-it's possible that chef may be caught unaware of the current state of the firewall. The best workaround is to add
-a `:flush` action to the firewall resource as early as possible in the chef run, if you plan to modify the firewall
-state outside of chef.
-
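-A minimal sketch of that workaround (assuming, as with most Chef resources, that an array of actions is accepted):
-
-```ruby
-# as early as possible in the run list: drop whatever was changed out-of-band,
-# then reinstall; rules declared later in the run are re-applied as usual
-firewall 'default' do
-  action [:flush, :install]
-end
-```
-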
-## Troubleshooting
-
-To figure out what the position values are for current rules, print the hash that contains the weights:
-
-```
-require 'pp'
-default_firewall = resources(:firewall, 'default')
-pp default_firewall.rules
-```
-
-## Development
-
-This section details "quick development" steps. For a detailed explanation, see [[Contributing.md]].
-
-1. Clone this repository from GitHub:
-
-`$ git clone git@github.com:chef-cookbooks/firewall.git`
-
-1. Create a git branch
-
-`$ git checkout -b my_bug_fix`
-
-1. Install dependencies:
-
-`$ bundle install`
-
-1. Make your changes/patches/fixes, committing appropriately
-1. **Write tests**
-1. Run the tests:
-
-- `bundle exec foodcritic -f any .`
-- `bundle exec rspec`
-- `bundle exec rubocop`
-- `bundle exec kitchen test`
-
-In detail:
-
-- Foodcritic will catch any Chef-specific style errors
-- RSpec will run the unit tests
-- Rubocop will check for Ruby-specific style errors
-- Test Kitchen will run and converge the recipes
-
-## Contributors
-
-This project exists thanks to all the people
-who [contribute.](https://opencollective.com/sous-chefs/contributors.svg?width=890&button=false)
-
-### Backers
-
-Thank you to all our backers!
-
-
-
-### Sponsors
-
-Support this project by becoming a sponsor. Your logo will show up here with a link to your website.
-
-[0]: https://mosh.mit.edu/
diff --git a/lc-gdn-chef/cookbooks/firewall/TODO.md b/lc-gdn-chef/cookbooks/firewall/TODO.md
deleted file mode 100644
index b315727d861cce79c6fd1ba464e7fed13248b7fc..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/firewall/TODO.md
+++ /dev/null
@@ -1,5 +0,0 @@
-- update for rhel-8+ nftables, RHEL docs recommend nftables for new
- firewalls
-- fix windows tests
-- iptables' `-S` not supported in libraries/provider_firewall_iptables.rb
-- save action might not make sense for firewalls
diff --git a/lc-gdn-chef/cookbooks/firewall/attributes/default.rb b/lc-gdn-chef/cookbooks/firewall/attributes/default.rb
deleted file mode 100644
index 7f72dcf06c0f4ecb13725b384d5963d1796cf384..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/firewall/attributes/default.rb
+++ /dev/null
@@ -1,5 +0,0 @@
-default['firewall']['allow_ssh'] = false
-default['firewall']['allow_winrm'] = false
-default['firewall']['allow_mosh'] = false
-default['firewall']['allow_loopback'] = false
-default['firewall']['allow_icmp'] = false
diff --git a/lc-gdn-chef/cookbooks/firewall/attributes/firewalld.rb b/lc-gdn-chef/cookbooks/firewall/attributes/firewalld.rb
deleted file mode 100644
index bc26d60939f1003bb8c7269bf330b83e2e67a4c5..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/firewall/attributes/firewalld.rb
+++ /dev/null
@@ -1 +0,0 @@
-default['firewall']['firewalld']['permanent'] = false
diff --git a/lc-gdn-chef/cookbooks/firewall/attributes/iptables.rb b/lc-gdn-chef/cookbooks/firewall/attributes/iptables.rb
deleted file mode 100644
index 551916a3e9ff4fdf2b0743994f885f994e8c943f..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/firewall/attributes/iptables.rb
+++ /dev/null
@@ -1,17 +0,0 @@
-default['firewall']['iptables']['defaults'][:policy] = {
- input: 'DROP',
- forward: 'DROP',
- output: 'ACCEPT',
-}
-default['firewall']['iptables']['defaults'][:ruleset] = {
- '*filter' => 1,
- ":INPUT #{node['firewall']['iptables']['defaults']['policy']['input']}" => 2,
- ":FORWARD #{node['firewall']['iptables']['defaults']['policy']['forward']}" => 3,
- ":OUTPUT #{node['firewall']['iptables']['defaults']['policy']['output']}" => 4,
- 'COMMIT_FILTER' => 100,
-}
-
-default['firewall']['ubuntu_iptables'] = false
-default['firewall']['redhat7_iptables'] = false
-default['firewall']['allow_established'] = true
-default['firewall']['ipv6_enabled'] = true
diff --git a/lc-gdn-chef/cookbooks/firewall/attributes/ufw.rb b/lc-gdn-chef/cookbooks/firewall/attributes/ufw.rb
deleted file mode 100644
index 35c8366b0535ca9b55df7422bdbaeeb5c32276f5..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/firewall/attributes/ufw.rb
+++ /dev/null
@@ -1,12 +0,0 @@
-default['firewall']['ufw']['defaults'] = {
- ipv6: 'yes',
- manage_builtins: 'no',
- ipt_sysctl: '/etc/ufw/sysctl.conf',
- ipt_modules: 'nf_conntrack_ftp nf_nat_ftp nf_conntrack_netbios_ns',
- policy: {
- input: 'DROP',
- output: 'ACCEPT',
- forward: 'DROP',
- application: 'SKIP',
- },
-}
diff --git a/lc-gdn-chef/cookbooks/firewall/attributes/windows.rb b/lc-gdn-chef/cookbooks/firewall/attributes/windows.rb
deleted file mode 100644
index 382dd82a608f0831222f5defd36f47b06399de4a..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/firewall/attributes/windows.rb
+++ /dev/null
@@ -1,8 +0,0 @@
-# Windows platform default settings: block undefined inbound traffic, allow all outgoing traffic
-
-default['firewall']['windows']['defaults'] = {
- policy: {
- input: 'blockinbound',
- output: 'allowoutbound',
- },
-}
diff --git a/lc-gdn-chef/cookbooks/firewall/chefignore b/lc-gdn-chef/cookbooks/firewall/chefignore
deleted file mode 100644
index cc170ea79ed8bde58bbb77030c0c2ab70b959c21..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/firewall/chefignore
+++ /dev/null
@@ -1,115 +0,0 @@
-# Put files/directories that should be ignored in this file when uploading
-# to a Chef Infra Server or Supermarket.
-# Lines that start with '# ' are comments.
-
-# OS generated files #
-######################
-.DS_Store
-ehthumbs.db
-Icon?
-nohup.out
-Thumbs.db
-.envrc
-
-# EDITORS #
-###########
-.#*
-.project
-.settings
-*_flymake
-*_flymake.*
-*.bak
-*.sw[a-z]
-*.tmproj
-*~
-\#*
-REVISION
-TAGS*
-tmtags
-.vscode
-.editorconfig
-
-## COMPILED ##
-##############
-*.class
-*.com
-*.dll
-*.exe
-*.o
-*.pyc
-*.so
-*/rdoc/
-a.out
-mkmf.log
-
-# Testing #
-###########
-.circleci/*
-.codeclimate.yml
-.delivery/*
-.foodcritic
-.kitchen*
-.mdlrc
-.overcommit.yml
-.rspec
-.rubocop.yml
-.travis.yml
-.watchr
-.yamllint
-azure-pipelines.yml
-Dangerfile
-examples/*
-features/*
-Guardfile
-kitchen.yml*
-mlc_config.json
-Procfile
-Rakefile
-spec/*
-test/*
-
-# SCM #
-#######
-.git
-.gitattributes
-.gitconfig
-.github/*
-.gitignore
-.gitkeep
-.gitmodules
-.svn
-*/.bzr/*
-*/.git
-*/.hg/*
-*/.svn/*
-
-# Berkshelf #
-#############
-Berksfile
-Berksfile.lock
-cookbooks/*
-tmp
-
-# Bundler #
-###########
-vendor/*
-Gemfile
-Gemfile.lock
-
-# Policyfile #
-##############
-Policyfile.rb
-Policyfile.lock.json
-
-# Documentation #
-#############
-CODE_OF_CONDUCT*
-CONTRIBUTING*
-documentation/*
-TESTING*
-UPGRADING*
-
-# Vagrant #
-###########
-.vagrant
-Vagrantfile
diff --git a/lc-gdn-chef/cookbooks/firewall/kitchen.dokken.yml b/lc-gdn-chef/cookbooks/firewall/kitchen.dokken.yml
deleted file mode 100644
index d5e87a8e7d3ad66b51ba056d72c7880283242f30..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/firewall/kitchen.dokken.yml
+++ /dev/null
@@ -1,62 +0,0 @@
----
-driver:
- name: dokken
- privileged: true # because Docker and SystemD/Upstart
- chef_version: <%= ENV['CHEF_VERSION'] || 'current' %>
- chef_license: accept-no-persist
-
-transport:
- name: dokken
-
-provisioner:
- name: dokken
-
-platforms:
- - name: almalinux-8
- driver:
- image: dokken/almalinux-8
- pid_one_command: /usr/lib/systemd/systemd
- - name: amazonlinux-2
- driver:
- image: dokken/amazonlinux-2
- pid_one_command: /usr/lib/systemd/systemd
- - name: debian-9
- driver:
- image: dokken/debian-9
- pid_one_command: /bin/systemd
- intermediate_instructions:
- - RUN /usr/bin/apt-get update
- - name: debian-10
- driver:
- image: dokken/debian-10
- pid_one_command: /bin/systemd
- intermediate_instructions:
- - RUN /usr/bin/apt-get update
- - name: centos-7
- driver:
- image: dokken/centos-7
- pid_one_command: /usr/lib/systemd/systemd
- - name: centos-stream-8
- driver:
- image: dokken/centos-stream-8
- pid_one_command: /usr/lib/systemd/systemd
- - name: fedora-latest
- driver:
- image: dokken/fedora-latest
- pid_one_command: /usr/lib/systemd/systemd
- - name: ubuntu-18.04
- driver:
- image: dokken/ubuntu-18.04
- pid_one_command: /bin/systemd
- intermediate_instructions:
- - RUN /usr/bin/apt-get update
- - name: ubuntu-20.04
- driver:
- image: dokken/ubuntu-20.04
- pid_one_command: /bin/systemd
- intermediate_instructions:
- - RUN /usr/bin/apt-get update
- - name: opensuse-leap-15
- driver:
- image: dokken/opensuse-leap-15
- pid_one_command: /bin/systemd
diff --git a/lc-gdn-chef/cookbooks/firewall/kitchen.windows.yml b/lc-gdn-chef/cookbooks/firewall/kitchen.windows.yml
deleted file mode 100644
index 5a92745605711836133f3470d9cda589772b25b0..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/firewall/kitchen.windows.yml
+++ /dev/null
@@ -1,18 +0,0 @@
----
-driver:
- name: exec
- gui: false
- customize:
- memory: 4096
-
-transport:
- name: exec
-
-provisioner:
- name: chef_zero
- enforce_idempotency: true
- multiple_converge: 2
- deprecations_as_errors: true
-
-platforms:
- - name: windows-latest
diff --git a/lc-gdn-chef/cookbooks/firewall/libraries/helpers.rb b/lc-gdn-chef/cookbooks/firewall/libraries/helpers.rb
deleted file mode 100644
index 497c586d0b516b1406a33bf3fe4791081ee7dfe4..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/firewall/libraries/helpers.rb
+++ /dev/null
@@ -1,105 +0,0 @@
-module FirewallCookbook
- module Helpers
- def dport_calc(new_resource)
- new_resource.dest_port || new_resource.port
- end
-
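-    # Normalises ports to the string form the underlying tools expect: an
-    # Integer becomes "22", an Array becomes a comma-joined list, and a Range
-    # such as 60000..61000 becomes "60000:61000" ("60000-61000" on Windows).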
- def port_to_s(p)
- if p.is_a?(String)
- p
- elsif p && p.is_a?(Integer)
- p.to_s
- elsif p && p.is_a?(Array)
- p_strings = p.map { |o| port_to_s(o) }
- p_strings.sort.join(',')
- elsif p && p.is_a?(Range)
- if platform_family?('windows')
- "#{p.first}-#{p.last}"
- else
- "#{p.first}:#{p.last}"
- end
- end
- end
-
- def ipv6_enabled?(new_resource)
- new_resource.ipv6_enabled
- end
-
- def disabled?(new_resource)
- # if either flag is found in the non-default boolean state
- disable_flag = !(new_resource.enabled && !new_resource.disabled)
-
- Chef::Log.warn("#{new_resource} has been disabled, not proceeding") if disable_flag
- disable_flag
- end
-
- def ip_with_mask(new_resource, ip)
- if ip.include?('/')
- ip
- elsif ipv4_rule?(new_resource)
- "#{ip}/32"
- elsif ipv6_rule?(new_resource)
- "#{ip}/128"
- else
- ip
- end
- end
-
- # ipv4-specific rule?
- def ipv4_rule?(new_resource)
- if (new_resource.source && IPAddr.new(new_resource.source).ipv4?) ||
- (new_resource.destination && IPAddr.new(new_resource.destination).ipv4?)
- true
- else
- false
- end
- end
-
- # ipv6-specific rule?
- def ipv6_rule?(new_resource)
- if (new_resource.source && IPAddr.new(new_resource.source).ipv6?) ||
- (new_resource.destination && IPAddr.new(new_resource.destination).ipv6?) ||
- new_resource.protocol =~ /ipv6/ ||
- new_resource.protocol =~ /icmpv6/
- true
- else
- false
- end
- end
-
- def debian?(current_node)
- current_node['platform_family'] == 'debian'
- end
-
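-    # Renders the accumulated rules hash into a single file body ordered by
-    # position weight; keys for repeatable directives (COMMIT, chain policies)
-    # are collapsed to the text before their first underscore.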
- def build_rule_file(rules)
- contents = []
- sorted_values = rules.values.sort.uniq
- sorted_values.each do |sorted_value|
- contents << "# position #{sorted_value}"
- rules.each do |k, v|
- next unless v == sorted_value
-
- contents << if repeatable_directives(k)
- k[/[^_]+/]
- else
- k
- end
- end
- end
- "#{contents.join("\n")}\n"
- end
-
- def repeatable_directives(s)
- %w(:OUTPUT :INPUT :POSTROUTING :PREROUTING COMMIT).each do |special|
- return true if s.start_with?(special)
- end
-
- false
- end
-
- def default_description(new_resource)
- new_resource.description ||
- "Generated by chef from #{cookbook_name}[#{recipe_name}] by #{new_resource}"
- end
- end
-end
diff --git a/lc-gdn-chef/cookbooks/firewall/libraries/helpers_firewalld.rb b/lc-gdn-chef/cookbooks/firewall/libraries/helpers_firewalld.rb
deleted file mode 100644
index 5deb40250e6ea3adc8c675418617a29497e47fa7..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/firewall/libraries/helpers_firewalld.rb
+++ /dev/null
@@ -1,116 +0,0 @@
-module FirewallCookbook
- module Helpers
- module Firewalld
- include FirewallCookbook::Helpers
- include Chef::Mixin::ShellOut
-
- def firewalld_rules_filename
- '/etc/sysconfig/firewalld-chef.rules'
- end
-
- def firewalld_rule!(cmd)
- shell_out!(cmd, input: 'yes')
- end
-
- def firewalld_active?
- cmd = shell_out('firewall-cmd', '--state')
- cmd.stdout =~ /^running$/
- end
-
- def firewalld_default_zone?(z)
- return false unless firewalld_active?
-
- cmd = shell_out('firewall-cmd', '--get-default-zone')
- cmd.stdout =~ /^#{z}$/
- end
-
- def firewalld_default_zone!(z)
- raise 'firewalld not active' unless firewalld_active?
-
- shell_out!('firewall-cmd', "--set-default-zone=#{z}")
- end
-
- def log_current_firewalld
- shell_out!('firewall-cmd --direct --get-all-rules')
- end
-
- def firewalld_flush!
- raise 'firewall not active' unless firewalld_active?
-
- shell_out!('firewall-cmd', '--direct', '--remove-rules', 'ipv4', 'filter', 'INPUT')
- shell_out!('firewall-cmd', '--direct', '--remove-rules', 'ipv4', 'filter', 'OUTPUT')
- shell_out!('firewall-cmd', '--direct', '--permanent', '--remove-rules', 'ipv4', 'filter', 'INPUT')
- shell_out!('firewall-cmd', '--direct', '--permanent', '--remove-rules', 'ipv4', 'filter', 'OUTPUT')
- end
-
- def firewalld_all_rules_permanent!
- raise 'firewall not active' unless firewalld_active?
-
- rules = shell_out!('firewall-cmd', '--direct', '--get-all-rules').stdout
- perm_rules = shell_out!('firewall-cmd', '--direct', '--permanent', '--get-all-rules').stdout
- rules == perm_rules
- end
-
- def firewalld_save!
- raise 'firewall not active' unless firewalld_active?
-
- shell_out!('firewall-cmd', '--direct', '--permanent', '--remove-rules', 'ipv4', 'filter', 'INPUT')
- shell_out!('firewall-cmd', '--direct', '--permanent', '--remove-rules', 'ipv4', 'filter', 'OUTPUT')
- shell_out!('firewall-cmd', '--direct', '--get-all-rules').stdout.lines do |line|
- shell_out!("firewall-cmd --direct --permanent --add-rule #{line}")
- end
- end
-
- def ip_versions(resource)
- if ipv4_rule?(resource)
- %w(ipv4)
- elsif ipv6_rule?(resource)
- %w(ipv6)
- else # no source or destination address, add rules for both ipv4 and ipv6
- %w(ipv4 ipv6)
- end
- end
-
- CHAIN = { in: 'INPUT', out: 'OUTPUT', pre: 'PREROUTING', post: 'POSTROUTING' }.freeze unless defined? CHAIN # , nil => "FORWARD"}
- TARGET = { allow: 'ACCEPT', reject: 'REJECT', deny: 'DROP', masquerade: 'MASQUERADE', redirect: 'REDIRECT', log: 'LOG --log-prefix \'iptables: \' --log-level 7' }.freeze unless defined? TARGET
-
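-      # Assembles a single firewall-cmd --direct rule string from the
-      # resource's attributes; a :raw rule is passed through unchanged
-      # (only stripped of surrounding whitespace).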
- def build_firewall_rule(new_resource, ip_version = 'ipv4')
- return new_resource.raw.strip if new_resource.raw
-
- type = new_resource.command
- firewall_rule = if new_resource.direction
- "#{ip_version} filter #{CHAIN[new_resource.direction.to_sym]} "
- else
- "#{ip_version} filter FORWARD "
- end
- firewall_rule << "#{new_resource.position} "
-
- if [:pre, :post].include?(new_resource.direction)
- firewall_rule << '-t nat '
- end
-
-      # Firewalld order of parameters is important here; see example output below:
- # ipv4 filter INPUT 1 -s 1.2.3.4/32 -d 5.6.7.8/32 -i lo -p tcp -m tcp -m state --state NEW -m comment --comment "hello" -j DROP
- firewall_rule << "-s #{ip_with_mask(new_resource, new_resource.source)} " if new_resource.source && new_resource.source != '0.0.0.0/0'
- firewall_rule << "-d #{new_resource.destination} " if new_resource.destination
-
- firewall_rule << "-i #{new_resource.interface} " if new_resource.interface
- firewall_rule << "-o #{new_resource.dest_interface} " if new_resource.dest_interface
-
- firewall_rule << "-p #{new_resource.protocol} " if new_resource.protocol && new_resource.protocol.to_s.to_sym != :none
- firewall_rule << '-m tcp ' if new_resource.protocol && new_resource.protocol.to_s.to_sym == :tcp
-
- # using multiport here allows us to simplify our greps and rule building
- firewall_rule << "-m multiport --sports #{port_to_s(new_resource.source_port)} " if new_resource.source_port
- firewall_rule << "-m multiport --dports #{port_to_s(dport_calc(new_resource))} " if dport_calc(new_resource)
-
- firewall_rule << "-m state --state #{new_resource.stateful.is_a?(Array) ? new_resource.stateful.join(',').upcase : new_resource.stateful.to_s.upcase} " if new_resource.stateful
- firewall_rule << "-m comment --comment '#{new_resource.description}' " if new_resource.include_comment
- firewall_rule << "-j #{TARGET[type]} "
- firewall_rule << "--to-ports #{new_resource.redirect_port} " if type == :redirect
- firewall_rule.strip!
- firewall_rule
- end
- end
- end
-end
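
The helper above concatenates the firewalld direct-rule fields in a fixed order (source, destination, interfaces, protocol, multiport ports, state, comment, target), and the firewalld provider later prefixes each string with `firewall-cmd --direct --add-rule`. A minimal plain-Ruby sketch of that shape; the concrete addresses, the /32 mask and the port values are illustrative assumptions, since `ip_with_mask` and `dport_calc` live in helpers.rb, which is not part of this file:

```ruby
# Illustrative only: direct-rule strings in the shape build_firewall_rule emits
# (position 50 is the firewall_rule default), plus the command the firewalld
# provider wraps around each one before shelling out.
examples = [
  'ipv4 filter INPUT 50 -s 10.1.2.3/32 -p tcp -m tcp -m multiport --dports 22 -m state --state NEW -j ACCEPT',
  'ipv4 filter INPUT 10 -s 192.168.0.0/16 -j DROP',
]
examples.each do |rule|
  puts "firewall-cmd --direct --add-rule #{rule}"
end
```
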
diff --git a/lc-gdn-chef/cookbooks/firewall/libraries/helpers_firewalld_dbus.rb b/lc-gdn-chef/cookbooks/firewall/libraries/helpers_firewalld_dbus.rb
deleted file mode 100644
index c78b90497d0e101032d0f99cb2b6a72147c4ac23..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/firewall/libraries/helpers_firewalld_dbus.rb
+++ /dev/null
@@ -1,72 +0,0 @@
-module FirewallCookbook
- module Helpers
- module FirewalldDBus
- def firewalld(system_bus)
- system_bus['org.fedoraproject.FirewallD1']
- end
-
- def firewalld_object(system_bus)
- firewalld(system_bus)['/org/fedoraproject/FirewallD1']
- end
-
- def firewalld_interface(system_bus)
- firewalld_object(system_bus)['org.fedoraproject.FirewallD1']
- end
-
- def config_object(system_bus)
- firewalld(system_bus)['/org/fedoraproject/FirewallD1/config']
- end
-
- def config_interface(system_bus)
- config_object(system_bus)['org.fedoraproject.FirewallD1.config']
- end
-
- def icmptype_interface(dbus, icmptype_path)
- icmptype_object = firewalld(dbus)[icmptype_path]
- icmptype_object['org.fedoraproject.FirewallD1.config.icmptype']
- end
-
- def ipset_interface(dbus, ipset_path)
- ipset_object = firewalld(dbus)[ipset_path]
- ipset_object['org.fedoraproject.FirewallD1.config.ipset']
- end
-
- def helper_interface(dbus, helper_path)
- helper_object = firewalld(dbus)[helper_path]
- helper_object['org.fedoraproject.FirewallD1.config.helper']
- end
-
- def service_interface(dbus, service_path)
- service_object = firewalld(dbus)[service_path]
- service_object['org.fedoraproject.FirewallD1.config.service']
- end
-
- def policy_interface(dbus, policy_path)
- policy_object = firewalld(dbus)[policy_path]
- policy_object['org.fedoraproject.FirewallD1.config.policy']
- end
-
- def zone_interface(dbus, zone_path)
- zone_object = firewalld(dbus)[zone_path]
- zone_object['org.fedoraproject.FirewallD1.config.zone']
- end
-
- # port=portid[-portid]:proto=protocol[:toport=portid[-portid]][:toaddr=address[/mask]]
- def parse_forward_ports(forward_ports)
- port_regex = %r{port=([\w-]+):proto=([\w]+)(:toport=([\w-]+)|)(:toaddr=([\d\./]+)|)}
- captures = forward_ports.match(port_regex).captures
- captures.delete_at(4)
- captures.delete_at(2)
- captures.map { |e| e || '' }
- end
-
- def forward_ports_to_dbus(new_resource)
- fwp = new_resource.forward_ports.map do |e|
- parse_forward_ports(e)
- end
- new_resource.forward_ports = fwp
- DBus.variant('a(ssss)', new_resource.forward_ports)
- end
- end
- end
-end
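
`parse_forward_ports` above is plain string munging, so its behaviour can be checked outside Chef. A standalone sketch (the method is copied with the regex passed in as a parameter, which is the only change):

```ruby
# The regex pulls (port, proto, toport, toaddr) out of firewalld's
# forward_ports string format and replaces missing optional parts with ''.
PORT_REGEX = %r{port=([\w-]+):proto=([\w]+)(:toport=([\w-]+)|)(:toaddr=([\d\./]+)|)}

def parse_forward_ports(str, regex = PORT_REGEX)
  captures = str.match(regex).captures
  captures.delete_at(4) # drop the wrapping group around :toaddr
  captures.delete_at(2) # drop the wrapping group around :toport
  captures.map { |e| e || '' }
end

p parse_forward_ports('port=8080:proto=tcp:toport=80:toaddr=10.0.0.5')
# => ["8080", "tcp", "80", "10.0.0.5"]
p parse_forward_ports('port=53:proto=udp')
# => ["53", "udp", "", ""]
```
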
diff --git a/lc-gdn-chef/cookbooks/firewall/libraries/helpers_iptables.rb b/lc-gdn-chef/cookbooks/firewall/libraries/helpers_iptables.rb
deleted file mode 100644
index 37714143bb31c34fec405df496ef0af23b900cfe..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/firewall/libraries/helpers_iptables.rb
+++ /dev/null
@@ -1,112 +0,0 @@
-module FirewallCookbook
- module Helpers
- module Iptables
- include FirewallCookbook::Helpers
- include Chef::Mixin::ShellOut
-
- CHAIN = { in: 'INPUT', out: 'OUTPUT', pre: 'PREROUTING', post: 'POSTROUTING' }.freeze unless defined? CHAIN # , nil => "FORWARD"}
- TARGET = { allow: 'ACCEPT', reject: 'REJECT', deny: 'DROP', masquerade: 'MASQUERADE', redirect: 'REDIRECT', log: 'LOG --log-prefix "iptables: " --log-level 7' }.freeze unless defined? TARGET
-
- def build_firewall_rule(current_node, rule_resource, ipv6 = false)
- el5 = current_node['platform_family'] == 'rhel' && Gem::Dependency.new('', '~> 5.0').match?('', current_node['platform_version'])
-
- return rule_resource.raw.strip if rule_resource.raw
- firewall_rule = if rule_resource.direction
- "-A #{CHAIN[rule_resource.direction.to_sym]} "
- else
- '-A FORWARD '
- end
-
- if [:pre, :post].include?(rule_resource.direction)
- firewall_rule << '-t nat '
- end
-
- # Iptables order of parameters is important here; see example output below:
- # -A INPUT -s 1.2.3.4/32 -d 5.6.7.8/32 -i lo -p tcp -m tcp -m state --state NEW -m comment --comment "hello" -j DROP
- firewall_rule << "-s #{ip_with_mask(rule_resource, rule_resource.source)} " if rule_resource.source && rule_resource.source != '0.0.0.0/0'
- firewall_rule << "-d #{rule_resource.destination} " if rule_resource.destination
-
- firewall_rule << "-i #{rule_resource.interface} " if rule_resource.interface
- firewall_rule << "-o #{rule_resource.dest_interface} " if rule_resource.dest_interface
-
- firewall_rule << "-p #{rule_resource.protocol} " if rule_resource.protocol && rule_resource.protocol.to_s.to_sym != :none
- firewall_rule << '-m tcp ' if rule_resource.protocol && rule_resource.protocol.to_s.to_sym == :tcp
-
- # using multiport here allows us to simplify our greps and rule building
- firewall_rule << "-m multiport --sports #{port_to_s(rule_resource.source_port)} " if rule_resource.source_port
- firewall_rule << "-m multiport --dports #{port_to_s(dport_calc(rule_resource))} " if dport_calc(rule_resource)
-
- firewall_rule << "-m state --state #{rule_resource.stateful.is_a?(Array) ? rule_resource.stateful.join(',').upcase : rule_resource.stateful.upcase} " if rule_resource.stateful
- # the comments extension is not available for ip6tables on rhel/centos 5
- unless el5 && ipv6
- firewall_rule << "-m comment --comment \"#{rule_resource.description}\" " if rule_resource.include_comment
- end
-
- firewall_rule << "-j #{TARGET[rule_resource.command.to_sym]} "
- firewall_rule << "--to-ports #{rule_resource.redirect_port} " if rule_resource.command == :redirect
- firewall_rule.strip!
- firewall_rule
- end
-
- def iptables_packages(new_resource)
- packages = if ipv6_enabled?(new_resource) && !amazon_linux? && node['platform_version'].to_i < 8
- %w(iptables iptables-ipv6)
- else
- %w(iptables)
- end
-
- # centos 7 requires extra service
- if (!debian?(node) && node['platform_version'].to_i >= 7) || amazon_linux?
- packages << %w(iptables-services)
- end
-
- packages.flatten
- end
-
- def iptables_commands(new_resource)
- if ipv6_enabled?(new_resource)
- %w(iptables ip6tables)
- else
- %w(iptables)
- end
- end
-
- def log_iptables(new_resource)
- iptables_commands(new_resource).each do |cmd|
- shell_out!("#{cmd} -L -n")
- end
- rescue
- Chef::Log.info('log_iptables failed!')
- end
-
- def iptables_flush!(new_resource)
- iptables_commands(new_resource).each do |cmd|
- shell_out!("#{cmd} -F")
- end
- end
-
- def iptables_default_allow!(new_resource)
- iptables_commands(new_resource).each do |cmd|
- shell_out!("#{cmd} -P INPUT ACCEPT")
- shell_out!("#{cmd} -P OUTPUT ACCEPT")
- shell_out!("#{cmd} -P FORWARD ACCEPT")
- end
- end
-
- def default_ruleset(current_node)
- current_node['firewall']['iptables']['defaults'][:ruleset].to_h
- end
-
- def ensure_default_rules_exist(current_node, new_resource)
- input = new_resource.rules
-
- # don't use iptables_commands here since we do populate the
- # hash regardless of ipv6 status
- %w(iptables ip6tables).each do |name|
- input[name] = {} unless input[name]
- input[name].merge!(default_ruleset(current_node).to_h)
- end
- end
- end
- end
-end
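
The iptables providers further down in this changeset store each generated line in a hash keyed by the rule string, with the firewall_rule position as its weight; `build_rule_file` (defined in helpers.rb, not shown here) is assumed to write them out lowest weight first. A minimal plain-Ruby sketch of that ordering with placeholder rules:

```ruby
# Illustrative only: iptables rule strings keyed by weight, replayed lowest
# weight first, mirroring how the providers order new_resource.rules.
rules = {
  '-A INPUT -s 10.1.2.3/32 -p tcp -m tcp -m multiport --dports 22 -m state --state NEW -j ACCEPT' => 50,
  '-A INPUT -j DROP' => 99,
  '-A INPUT -i lo -j ACCEPT' => 10,
}
rules.sort_by { |_rule, weight| weight }.each do |rule, weight|
  puts format('%3d  %s', weight, rule)
end
```
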
diff --git a/lc-gdn-chef/cookbooks/firewall/libraries/helpers_nftables.rb b/lc-gdn-chef/cookbooks/firewall/libraries/helpers_nftables.rb
deleted file mode 100644
index 1ac2dfd762663ca8b1b5dfabd0e2f843fbe81bd5..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/firewall/libraries/helpers_nftables.rb
+++ /dev/null
@@ -1,159 +0,0 @@
-module FirewallCookbook
- module Helpers
- module Nftables
- include FirewallCookbook::Helpers
-
- CHAIN ||= {
- in: 'INPUT',
- out: 'OUTPUT',
- pre: 'PREROUTING',
- post: 'POSTROUTING',
- forward: 'FORWARD',
- }.freeze
-
- TARGET ||= {
- accept: 'accept',
- allow: 'accept',
- counter: 'counter',
- deny: 'drop',
- drop: 'drop',
- log: 'log',
- masquerade: 'masquerade',
- redirect: 'redirect',
- reject: 'reject',
- }.freeze
-
- def port_to_s(ports)
- case ports
- when String
- ports
- when Integer
- ports.to_s
- when Array
- p_strings = ports.map { |o| port_to_s(o) }
- "{#{p_strings.sort.join(',')}}"
- when Range
- "#{ports.first}-#{ports.last}"
- else
- raise "unknown class of port definition: #{ports.class}"
- end
- end
-
- def nftables_command_log(rule_resource)
- log_prefix = 'prefix '
- log_prefix << if rule_resource.log_prefix.nil?
- "\"#{CHAIN[rule_resource.direction]}:\""
- else
- "\"#{rule_resource.log_prefix}\""
- end
- log_group = if rule_resource.log_group.nil?
- nil
- else
- "group #{rule_resource.log_group} "
- end
- "log #{log_prefix} #{log_group}"
- end
-
- def nftables_command_redirect(rule_resource)
- if rule_resource.redirect_port.nil?
- raise 'Specify redirect_port when using :redirect as command'
- end
-
- "redirect to #{rule_resource.redirect_port} "
- end
-
- def nftables_commands(rule_resource)
- firewall_rule = ''
- Array(rule_resource.command).each do |command|
- begin
- target = TARGET.fetch(command)
- rescue KeyError
- raise "Invalid command: #{command.inspect}. Use one of #{TARGET.keys}"
- end
- firewall_rule << case target
- when 'log'
- nftables_command_log(rule_resource)
- when 'redirect'
- nftables_command_redirect(rule_resource)
- else
- "#{TARGET[command.to_sym]} "
- end
- end
- firewall_rule
- end
-
- def build_firewall_rule(rule_resource)
- return rule_resource.raw.strip if rule_resource.raw
-
- ip = ipv6_rule?(rule_resource) ? 'ip6' : 'ip'
- table = if [:pre, :post].include?(rule_resource.direction)
- 'nat'
- else
- 'filter'
- end
- firewall_rule = if table == 'nat'
- "add rule #{ip} #{table} "
- else
- "add rule inet #{table} "
- end
- firewall_rule << "#{CHAIN.fetch(rule_resource.direction.to_sym, 'FORWARD')} "
-
- firewall_rule << "iif #{rule_resource.interface} " if rule_resource.interface
- firewall_rule << "oif #{rule_resource.outerface} " if rule_resource.outerface
-
- if rule_resource.source
- source_with_mask = ip_with_mask(rule_resource, rule_resource.source)
- if source_with_mask != '0.0.0.0/0' && source_with_mask != '::/128'
- firewall_rule << "#{ip} saddr #{source_with_mask} "
- end
- end
- firewall_rule << "#{ip} daddr #{rule_resource.destination} " if rule_resource.destination
-
- case rule_resource.protocol
- when :icmp
- firewall_rule << 'icmp type echo-request '
- when :'ipv6-icmp', :icmpv6
- firewall_rule << 'icmpv6 type { echo-request, nd-router-solicit, nd-neighbor-solicit, nd-router-advert, nd-neighbor-advert } '
- when :tcp, :udp
- firewall_rule << "#{rule_resource.protocol} sport #{port_to_s(rule_resource.sport)} " if rule_resource.sport
- firewall_rule << "#{rule_resource.protocol} dport #{port_to_s(rule_resource.dport)} " if rule_resource.dport
- when :esp, :ah
- firewall_rule << "#{ip} #{ip == 'ip6' ? 'nexthdr' : 'protocol'} #{rule_resource.protocol} "
- when :ipv6, :none
- # nothing to do
- end
-
- firewall_rule << "ct state #{Array(rule_resource.stateful).join(',').downcase} " if rule_resource.stateful
- firewall_rule << nftables_commands(rule_resource)
- firewall_rule << "comment \"#{rule_resource.description}\" " if rule_resource.include_comment
- firewall_rule.strip!
- firewall_rule
- end
-
- def default_ruleset(new_resource)
- rules = {
- 'add table inet filter' => 1,
- "add chain inet filter INPUT { type filter hook input priority 0 ; policy #{new_resource.input_policy}; }" => 2,
- "add chain inet filter OUTPUT { type filter hook output priority 0 ; policy #{new_resource.output_policy}; }" => 2,
- "add chain inet filter FORWARD { type filter hook forward priority 0 ; policy #{new_resource.forward_policy}; }" => 2,
- }
- if new_resource.table_ip_nat
- rules['add table ip nat'] = 1
- rules['add chain ip nat POSTROUTING { type nat hook postrouting priority 100 ;}'] = 2
- rules['add chain ip nat PREROUTING { type nat hook prerouting priority -100 ;}'] = 2
- end
- if new_resource.table_ip6_nat
- rules['add table ip6 nat'] = 1
- rules['add chain ip6 nat POSTROUTING { type nat hook postrouting priority 100 ;}'] = 2
- rules['add chain ip6 nat PREROUTING { type nat hook prerouting priority -100 ;}'] = 2
- end
- rules
- end
-
- def ensure_default_rules_exist(new_resource)
- input = new_resource.rules || {}
- input.merge!(default_ruleset(new_resource))
- end
- end
- end
-end
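
`port_to_s` above is self-contained, so its output for the different attribute types can be shown directly. A condensed copy (same semantics; note that array elements are sorted as strings, exactly as the original `.sort` on the mapped strings does):

```ruby
# nftables set/range syntax for the Ruby types a port attribute may hold.
def port_to_s(ports)
  case ports
  when String  then ports
  when Integer then ports.to_s
  when Array   then "{#{ports.map { |o| port_to_s(o) }.sort.join(',')}}"
  when Range   then "#{ports.first}-#{ports.last}"
  else raise "unknown class of port definition: #{ports.class}"
  end
end

puts port_to_s(22)          # "22"
puts port_to_s([80, 443])   # "{443,80}" -- string sort, not numeric
puts port_to_s(8000..8100)  # "8000-8100"
```
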
diff --git a/lc-gdn-chef/cookbooks/firewall/libraries/helpers_ufw.rb b/lc-gdn-chef/cookbooks/firewall/libraries/helpers_ufw.rb
deleted file mode 100644
index 9e035c5b88b60da91152c346742e2ee1c78b21b8..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/firewall/libraries/helpers_ufw.rb
+++ /dev/null
@@ -1,135 +0,0 @@
-module FirewallCookbook
- module Helpers
- module Ufw
- include FirewallCookbook::Helpers
- include Chef::Mixin::ShellOut
-
- def ufw_rules_filename
- '/etc/default/ufw-chef.rules'
- end
-
- def ufw_active?
- cmd = shell_out!('ufw', 'status')
- cmd.stdout =~ /^Status:\sactive/
- end
-
- def ufw_disable!
- shell_out!('ufw', 'disable', input: 'yes')
- end
-
- def ufw_enable!
- shell_out!('ufw', 'enable', input: 'yes')
- end
-
- def ufw_reset!
- shell_out!('ufw', 'reset', input: 'yes')
- end
-
- def ufw_logging!(param)
- shell_out!('ufw', 'logging', param.to_s)
- end
-
- def ufw_rule!(cmd)
- shell_out!(cmd, input: 'yes')
- end
-
- def build_rule(new_resource)
- Chef::Log.info("#{new_resource.name} apply_rule #{new_resource.command}")
-
- # if we don't do this, we may see some bugs where traffic is opened on all ports to all hosts when only RELATED,ESTABLISHED was intended
- if new_resource.stateful
- msg = ''
- msg << "firewall_rule[#{new_resource.name}] was asked to "
- msg << "#{new_resource.command} a stateful rule using #{new_resource.stateful} "
- msg << 'but ufw does not support this kind of rule. Consider guarding by platform_family.'
- raise msg
- end
-
- # if we don't do this, ufw will fail as it does not support protocol numbers, so we only allow the named protocols accepted by the check below (tcp/udp/esp/ah/ipv6/none)
- if new_resource.protocol && !new_resource.protocol.to_s.downcase.match('^(tcp|udp|esp|ah|ipv6|none)$')
- msg = ''
- msg << "firewall_rule[#{new_resource.name}] was asked to "
- msg << "#{new_resource.command} a rule using protocol #{new_resource.protocol} "
- msg << 'but ufw does not support this kind of rule. Consider guarding by platform_family.'
- raise msg
- end
-
- # some examples:
- # ufw allow from 192.168.0.4 to any port 22
- # ufw deny proto tcp from 10.0.0.0/8 to 192.168.0.1 port 25
- # ufw insert 1 allow proto tcp from 0.0.0.0/0 to 192.168.0.1 port 25
-
- if new_resource.raw
- "ufw #{new_resource.raw.strip}"
- else
- "ufw #{rule(new_resource)}"
- end
- end
-
- def rule(new_resource)
- rule = ''
- rule << "#{new_resource.command} "
- rule << rule_interface(new_resource)
- rule << rule_logging(new_resource)
- rule << rule_proto(new_resource)
- rule << rule_dest_port(new_resource)
- rule << rule_source_port(new_resource)
- rule = rule.strip
-
- if rule == 'ufw allow in proto tcp to any from any'
- Chef::Log.warn("firewall_rule[#{new_resource.name}] produced a rule that opens all traffic. This may be a logic error in your cookbook.")
- end
-
- rule
- end
-
- def rule_interface(new_resource)
- rule = ''
- rule << "#{new_resource.direction} " if new_resource.direction
- rule << "on #{new_resource.interface} " if new_resource.interface && new_resource.direction
- rule << "in on #{new_resource.interface} " if new_resource.interface && !new_resource.direction
- rule
- end
-
- def rule_proto(new_resource)
- rule = ''
- rule << "proto #{new_resource.protocol} " if new_resource.protocol && new_resource.protocol.to_s.to_sym != :none
- rule
- end
-
- def rule_dest_port(new_resource)
- rule = if new_resource.destination
- "to #{new_resource.destination} "
- else
- 'to any '
- end
- rule << "port #{port_to_s(dport_calc(new_resource))} " if dport_calc(new_resource)
- rule
- end
-
- def rule_source_port(new_resource)
- rule = if new_resource.source
- "from #{new_resource.source} "
- else
- 'from any '
- end
-
- if new_resource.source_port
- rule << "port #{port_to_s(new_resource.source_port)} "
- end
- rule
- end
-
- def rule_logging(new_resource)
- case new_resource.logging && new_resource.logging.to_sym
- when :connections
- 'log '
- when :packets
- 'log-all '
- else
- ''
- end
- end
- end
- end
-end
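
`rule` above appends its fragments in a fixed order: command, direction/interface, logging, protocol, destination and port, then source and port. Two illustrative resources (all values are placeholders) would therefore produce strings like these:

```ruby
# Illustrative only: ufw command strings in the order rule() assembles them.
examples = [
  # allow tcp/22 from one host, default direction :in, no interface
  'ufw allow in proto tcp to any port 22 from 192.168.0.4',
  # deny udp/53 to a specific destination, with connection logging enabled
  'ufw deny in log proto udp to 192.168.0.1 port 53 from any',
]
examples.each { |cmd| puts cmd }
```
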
diff --git a/lc-gdn-chef/cookbooks/firewall/libraries/helpers_windows.rb b/lc-gdn-chef/cookbooks/firewall/libraries/helpers_windows.rb
deleted file mode 100644
index 4cb47ac8e56de2ff52144c33669a34389ad5bfbc..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/firewall/libraries/helpers_windows.rb
+++ /dev/null
@@ -1,129 +0,0 @@
-module FirewallCookbook
- module Helpers
- module Windows
- include FirewallCookbook::Helpers
- include Chef::Mixin::ShellOut
-
- def fixup_cidr(str)
- newstr = str.clone
- newstr.gsub!('0.0.0.0/0', 'any') if newstr.include?('0.0.0.0/0')
- newstr.gsub!('/0', '') if newstr.include?('/0')
- newstr
- end
-
- def windows_rules_filename
- "#{ENV['HOME']}/windows-chef.rules"
- end
-
- def active?
- @active ||= begin
- cmd = shell_out!('netsh advfirewall show currentprofile')
- cmd.stdout =~ /^State\sON/
- end
- end
-
- def enable!
- shell_out!('netsh advfirewall set currentprofile state on')
- end
-
- def disable!
- shell_out!('netsh advfirewall set currentprofile state off')
- end
-
- def reset!
- shell_out!('netsh advfirewall reset')
- end
-
- def add_rule!(params)
- shell_out!("netsh advfirewall #{params}")
- end
-
- def delete_all_rules!
- shell_out!('netsh advfirewall firewall delete rule name=all')
- end
-
- def to_type(new_resource)
- cmd = new_resource.command
- if cmd == :reject || cmd == :deny
- :block
- else
- :allow
- end
- end
-
- def build_rule(new_resource)
- type = to_type(new_resource)
- parameters = {}
-
- parameters['description'] = "\"#{new_resource.description}\""
- parameters['dir'] = new_resource.direction
-
- new_resource.program && parameters['program'] = new_resource.program
- new_resource.service && parameters['service'] = new_resource.service
- parameters['protocol'] = new_resource.protocol
-
- if new_resource.direction.to_sym == :out
- parameters['localip'] = new_resource.source ? fixup_cidr(new_resource.source) : 'any'
- parameters['localport'] = new_resource.source_port ? port_to_s(new_resource.source_port) : 'any'
- parameters['interfacetype'] = new_resource.interface || 'any'
- parameters['remoteip'] = new_resource.destination ? fixup_cidr(new_resource.destination) : 'any'
- parameters['remoteport'] = new_resource.dest_port ? port_to_s(new_resource.dest_port) : 'any'
- else
- parameters['localip'] = new_resource.destination || 'any'
- parameters['localport'] = dport_calc(new_resource) ? port_to_s(dport_calc(new_resource)) : 'any'
- parameters['interfacetype'] = new_resource.dest_interface || 'any'
- parameters['remoteip'] = new_resource.source ? fixup_cidr(new_resource.source) : 'any'
- parameters['remoteport'] = new_resource.source_port ? port_to_s(new_resource.source_port) : 'any'
- end
-
- parameters['action'] = type.to_s
-
- partial_command = parameters.map { |k, v| "#{k}=#{v}" }.join(' ')
- "firewall add rule name=\"#{new_resource.name}\" #{partial_command}"
- end
-
- def rule_exists?(name)
- @exists ||= begin
- cmd = shell_out!("netsh advfirewall firewall show rule name=\"#{name}\"", returns: [0, 1])
- cmd.stdout !~ /^No rules match the specified criteria/
- end
- end
-
- def show_all_rules!
- cmd = shell_out!('netsh advfirewall firewall show rule name=all')
- cmd.stdout.each_line do |line|
- Chef::Log.warn(line)
- end
- end
-
- def rule_up_to_date?(name, type)
- @up_to_date ||= begin
- desired_parameters = rule_parameters(type)
- current_parameters = {}
-
- cmd = shell_out!("netsh advfirewall firewall show rule name=\"#{name}\" verbose")
- cmd.stdout.each_line do |line|
- current_parameters['description'] = "\"#{Regexp.last_match(1).chomp}\"" if line =~ /^Description:\s+(.*)$/
- current_parameters['dir'] = Regexp.last_match(1).chomp if line =~ /^Direction:\s+(.*)$/
- current_parameters['program'] = Regexp.last_match(1).chomp if line =~ /^Program:\s+(.*)$/
- current_parameters['service'] = Regexp.last_match(1).chomp if line =~ /^Service:\s+(.*)$/
- current_parameters['protocol'] = Regexp.last_match(1).chomp if line =~ /^Protocol:\s+(.*)$/
- current_parameters['localip'] = Regexp.last_match(1).chomp if line =~ /^LocalIP:\s+(.*)$/
- current_parameters['localport'] = Regexp.last_match(1).chomp if line =~ /^LocalPort:\s+(.*)$/
- current_parameters['interfacetype'] = Regexp.last_match(1).chomp if line =~ /^InterfaceTypes:\s+(.*)$/
- current_parameters['remoteip'] = Regexp.last_match(1).chomp if line =~ /^RemoteIP:\s+(.*)$/
- current_parameters['remoteport'] = Regexp.last_match(1).chomp if line =~ /^RemotePort:\s+(.*)$/
- current_parameters['action'] = Regexp.last_match(1).chomp if line =~ /^Action:\s+(.*)$/
- end
-
- up_to_date = true
- desired_parameters.each do |k, v|
- up_to_date = false if current_parameters[k] !~ /^["]?#{v}["]?$/i
- end
-
- up_to_date
- end
- end
- end
- end
-end
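
`fixup_cidr` above has no Chef dependencies, so its rewrites can be shown standalone (copied verbatim):

```ruby
# netsh has no 0.0.0.0/0 notation: the any-address form becomes the keyword
# 'any' and a bare /0 suffix is stripped.
def fixup_cidr(str)
  newstr = str.clone
  newstr.gsub!('0.0.0.0/0', 'any') if newstr.include?('0.0.0.0/0')
  newstr.gsub!('/0', '') if newstr.include?('/0')
  newstr
end

puts fixup_cidr('0.0.0.0/0')      # "any"
puts fixup_cidr('192.168.0.0/0')  # "192.168.0.0"
puts fixup_cidr('10.0.0.0/8')     # "10.0.0.0/8" (unchanged)
```
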
diff --git a/lc-gdn-chef/cookbooks/firewall/libraries/provider_firewall_firewalld.rb b/lc-gdn-chef/cookbooks/firewall/libraries/provider_firewall_firewalld.rb
deleted file mode 100644
index b8a3c1dc51b395708058b7d3e616d1c9633d6047..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/firewall/libraries/provider_firewall_firewalld.rb
+++ /dev/null
@@ -1,179 +0,0 @@
-#
-# Author:: Ronald Doorn ()
-# Cookbook:: firewall
-# Resource:: default
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-class Chef
- class Provider::FirewallFirewalld < Chef::Provider::LWRPBase
- include FirewallCookbook::Helpers::Firewalld
-
- provides :firewall, os: 'linux', platform_family: %w(rhel fedora amazon) do |node|
- (node['platform_version'].to_i >= 7 && !node['firewall']['redhat7_iptables']) || (amazon_linux? && !node['firewall']['redhat7_iptables'])
- end
-
- def whyrun_supported?
- false
- end
-
- action :install do
- return if disabled?(new_resource)
-
- firewalld_package = package 'firewalld' do
- action :nothing
- options new_resource.package_options
- end
- firewalld_package.run_action(:install)
- new_resource.updated_by_last_action(firewalld_package.updated_by_last_action?)
-
- unless ::File.exist?(firewalld_rules_filename)
- rules_file = lookup_or_create_rulesfile
- rules_file.content '# created by chef to allow service to start'
- rules_file.run_action(:create)
- new_resource.updated_by_last_action(rules_file.updated_by_last_action?)
- end
-
- firewalld_service = lookup_or_create_service
- [:enable, :start].each do |a|
- firewalld_service.run_action(a)
- new_resource.updated_by_last_action(firewalld_service.updated_by_last_action?)
- end
- end
-
- action :restart do
- return if disabled?(new_resource)
-
- # ensure it's initialized
- new_resource.rules({}) unless new_resource.rules
- new_resource.rules['firewalld'] = {} unless new_resource.rules['firewalld']
-
- # this populates the hash of rules from firewall_rule resources
- firewall_rules = Chef.run_context.resource_collection.select { |item| item.is_a?(Chef::Resource::FirewallRule) }
- firewall_rules.each do |firewall_rule|
- next unless firewall_rule.action.include?(:create) && !firewall_rule.should_skip?(:create)
-
- ip_versions(firewall_rule).each do |ip_version|
- # build rules to apply with weight
- k = "firewall-cmd --direct --add-rule #{build_firewall_rule(firewall_rule, ip_version)}"
- v = firewall_rule.position
-
- # unless we're adding them for the first time.... bail out.
- next if new_resource.rules['firewalld'].key?(k) && new_resource.rules['firewalld'][k] == v
- new_resource.rules['firewalld'][k] = v
-
- # If persistent rules is enabled (default) make sure we add a permanent rule at the same time
- perm_rules = node && node['firewall'] && node['firewall']['firewalld'] && node['firewall']['firewalld']['permanent']
- if firewall_rule.permanent || perm_rules
- k = "firewall-cmd --permanent --direct --add-rule #{build_firewall_rule(firewall_rule, ip_version)}"
- new_resource.rules['firewalld'][k] = v
- end
- end
- end
-
- # ensure a file resource exists with the current firewalld rules
- rules_file = lookup_or_create_rulesfile
- rules_file.content build_rule_file(new_resource.rules['firewalld'])
- rules_file.run_action(:create)
-
- # ensure the service is running without waiting.
- firewalld_service = lookup_or_create_service
- [:enable, :start].each do |a|
- firewalld_service.run_action(a)
- new_resource.updated_by_last_action(firewalld_service.updated_by_last_action?)
- end
-
- # mark updated if we changed the zone
- unless firewalld_default_zone?(new_resource.enabled_zone)
- firewalld_default_zone!(new_resource.enabled_zone)
- new_resource.updated_by_last_action(true)
- end
-
- # if the file was changed, load new ruleset
- return unless rules_file.updated_by_last_action?
- firewalld_flush!
- # TODO: support logging
-
- new_resource.rules['firewalld'].sort_by { |_k, v| v }.map { |k, _v| k }.each do |cmd|
- firewalld_rule!(cmd)
- end
-
- new_resource.updated_by_last_action(true)
- end
-
- action :disable do
- return if disabled?(new_resource)
-
- if firewalld_active?
- firewalld_flush!
- firewalld_default_zone!(new_resource.disabled_zone)
- new_resource.updated_by_last_action(true)
- end
-
- # ensure the service is stopped without waiting.
- firewalld_service = lookup_or_create_service
- [:disable, :stop].each do |a|
- firewalld_service.run_action(a)
- new_resource.updated_by_last_action(firewalld_service.updated_by_last_action?)
- end
-
- rules_file = lookup_or_create_rulesfile
- rules_file.content '# created by chef to allow service to start'
- rules_file.run_action(:create)
- new_resource.updated_by_last_action(rules_file.updated_by_last_action?)
- end
-
- action :flush do
- return if disabled?(new_resource)
- return unless firewalld_active?
-
- firewalld_flush!
- new_resource.updated_by_last_action(true)
-
- rules_file = lookup_or_create_rulesfile
- rules_file.content '# created by chef to allow service to start'
- rules_file.run_action(:create)
- new_resource.updated_by_last_action(rules_file.updated_by_last_action?)
- end
-
- action :save do
- return if disabled?(new_resource)
- return if firewalld_all_rules_permanent!
-
- firewalld_save!
- new_resource.updated_by_last_action(true)
- end
-
- def lookup_or_create_service
- begin
- firewalld_service = Chef.run_context.resource_collection.find(service: 'firewalld')
- rescue
- firewalld_service = service 'firewalld' do
- action :nothing
- end
- end
- firewalld_service
- end
-
- def lookup_or_create_rulesfile
- begin
- firewalld_file = Chef.run_context.resource_collection.find(file: firewalld_rules_filename)
- rescue
- firewalld_file = file firewalld_rules_filename do
- action :nothing
- end
- end
- firewalld_file
- end
- end
-end
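
One detail of the :restart action above worth calling out: when either the rule or the node attribute asks for persistence, every runtime command gets a `--permanent` twin with the same weight. A plain-Ruby sketch of that duplication (rule text and weight are placeholders):

```ruby
# Illustrative only: mirrors the permanent-rule duplication in action :restart.
rule      = 'ipv4 filter INPUT 50 -p tcp -m tcp -m multiport --dports 22 -j ACCEPT'
weight    = 50
permanent = true

commands = { "firewall-cmd --direct --add-rule #{rule}" => weight }
commands["firewall-cmd --permanent --direct --add-rule #{rule}"] = weight if permanent

commands.each_key { |cmd| puts cmd }
```
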
diff --git a/lc-gdn-chef/cookbooks/firewall/libraries/provider_firewall_iptables.rb b/lc-gdn-chef/cookbooks/firewall/libraries/provider_firewall_iptables.rb
deleted file mode 100644
index 1878861bf39f59abc8e8b18e85278cdc718e9a99..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/firewall/libraries/provider_firewall_iptables.rb
+++ /dev/null
@@ -1,171 +0,0 @@
-#
-# Author:: Seth Chisamore ()
-# Cookbook:: firewall
-# Resource:: default
-#
-# Copyright:: 2011-2019, Chef Software, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-class Chef
- class Provider::FirewallIptables < Chef::Provider::LWRPBase
- include FirewallCookbook::Helpers
- include FirewallCookbook::Helpers::Iptables
-
- provides :firewall, os: 'linux', platform_family: %w(rhel fedora amazon) do |node|
- (node['platform_version'].to_i < 7 && !amazon_linux?) || node['platform_version'].to_i >= 8 || node['firewall']['redhat7_iptables']
- end
-
- def whyrun_supported?
- false
- end
-
- action :install do
- return if disabled?(new_resource)
-
- # Ensure the package is installed
- iptables_packages(new_resource).each do |p|
- iptables_pkg = package p do
- action :nothing
- end
- iptables_pkg.run_action(:install)
- new_resource.updated_by_last_action(true) if iptables_pkg.updated_by_last_action?
- end
-
- iptables_commands(new_resource).each do |svc|
- unless ::File.exist?("/etc/sysconfig/#{svc}")
- # must create empty file for service to start
- iptables_file = lookup_or_create_rulesfile(svc)
- iptables_file.content '# created by chef to allow service to start'
- iptables_file.run_action(:create)
- new_resource.updated_by_last_action(true) if iptables_file.updated_by_last_action?
- end
-
- iptables_service = lookup_or_create_service(svc)
- [:enable, :start].each do |a|
- iptables_service.run_action(a)
- new_resource.updated_by_last_action(true) if iptables_service.updated_by_last_action?
- end
- end
- end
-
- action :restart do
- return if disabled?(new_resource)
-
- # prints all the firewall rules
- log_iptables(new_resource)
-
- # ensure it's initialized
- new_resource.rules({}) unless new_resource.rules
- ensure_default_rules_exist(node, new_resource)
-
- # this populates the hash of rules from firewall_rule resources
- firewall_rules = Chef.run_context.resource_collection.select { |item| item.is_a?(Chef::Resource::FirewallRule) }
- firewall_rules.each do |firewall_rule|
- next unless firewall_rule.action.include?(:create) && !firewall_rule.should_skip?(:create)
-
- types = if ipv6_rule?(firewall_rule) # an ipv6 specific rule
- %w(ip6tables)
- elsif ipv4_rule?(firewall_rule) # an ipv4 specific rule
- %w(iptables)
- else # or not specific
- %w(iptables ip6tables)
- end
-
- types.each do |iptables_type|
- # build rules to apply with weight
- k = build_firewall_rule(node, firewall_rule, iptables_type == 'ip6tables')
- v = firewall_rule.position
-
- # unless we're adding them for the first time.... bail out.
- next if new_resource.rules[iptables_type].key?(k) && new_resource.rules[iptables_type][k] == v
- new_resource.rules[iptables_type][k] = v
- end
- end
-
- iptables_commands(new_resource).each do |iptables_type|
- # this takes the commands in each hash entry and builds a rule file
- iptables_file = lookup_or_create_rulesfile(iptables_type)
- iptables_file.content build_rule_file(new_resource.rules[iptables_type])
- iptables_file.run_action(:create)
-
- # if the file was unchanged, skip loop iteration, otherwise restart iptables
- next unless iptables_file.updated_by_last_action?
-
- iptables_service = lookup_or_create_service(iptables_type)
- iptables_service.run_action(:restart)
- new_resource.updated_by_last_action(true)
- end
- end
-
- action :disable do
- return if disabled?(new_resource)
-
- iptables_flush!(new_resource)
- iptables_default_allow!(new_resource)
- new_resource.updated_by_last_action(true)
-
- iptables_commands(new_resource).each do |svc|
- iptables_service = lookup_or_create_service(svc)
- [:disable, :stop].each do |a|
- iptables_service.run_action(a)
- new_resource.updated_by_last_action(true) if iptables_service.updated_by_last_action?
- end
-
- # must create empty file for service to start
- iptables_file = lookup_or_create_rulesfile(svc)
- iptables_file.content '# created by chef to allow service to start'
- iptables_file.run_action(:create)
- new_resource.updated_by_last_action(true) if iptables_file.updated_by_last_action?
- end
- end
-
- action :flush do
- return if disabled?(new_resource)
-
- iptables_flush!(new_resource)
- new_resource.updated_by_last_action(true)
-
- iptables_commands(new_resource).each do |svc|
- # must create empty file for service to start
- iptables_file = lookup_or_create_rulesfile(svc)
- iptables_file.content '# created by chef to allow service to start'
- iptables_file.run_action(:create)
- new_resource.updated_by_last_action(true) if iptables_file.updated_by_last_action?
- end
- end
-
- def lookup_or_create_service(name)
- begin
- iptables_service = Chef.run_context.resource_collection.find(service: name)
- rescue
- iptables_service = service name do
- action :nothing
- end
- end
- iptables_service
- end
-
- def lookup_or_create_rulesfile(name)
- begin
- iptables_file = Chef.run_context.resource_collection.find(file: "/etc/sysconfig/#{name}")
- rescue
- iptables_file = file "/etc/sysconfig/#{name}" do
- action :nothing
- end
- end
- iptables_file
- end
- end
-end
diff --git a/lc-gdn-chef/cookbooks/firewall/libraries/provider_firewall_iptables_ubuntu.rb b/lc-gdn-chef/cookbooks/firewall/libraries/provider_firewall_iptables_ubuntu.rb
deleted file mode 100644
index 35a317327887ab204f49f90c870e88e434fab58e..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/firewall/libraries/provider_firewall_iptables_ubuntu.rb
+++ /dev/null
@@ -1,200 +0,0 @@
-#
-# Author:: Seth Chisamore ()
-# Cookbook:: firewall
-# Resource:: default
-#
-# Copyright:: 2011-2019, Chef Software, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-class Chef
- class Provider::FirewallIptablesUbuntu < Chef::Provider::LWRPBase
- include FirewallCookbook::Helpers
- include FirewallCookbook::Helpers::Iptables
-
- provides :firewall, os: 'linux', platform_family: %w(debian) do |node|
- node['firewall'] && node['firewall']['ubuntu_iptables'] &&
- node['platform_version'].to_f > (node['platform'] == 'ubuntu' ? 14.04 : 7)
- end
-
- def whyrun_supported?
- false
- end
-
- action :install do
- return if disabled?(new_resource)
-
- # Ensure the package is installed
- pkg = package 'iptables-persistent' do
- action :nothing
- end
- pkg.run_action(:install)
- new_resource.updated_by_last_action(true) if pkg.updated_by_last_action?
-
- rule_files = %w(rules.v4)
- rule_files << 'rules.v6' if ipv6_enabled?(new_resource)
- rule_files.each do |svc|
- next if ::File.exist?("/etc/iptables/#{svc}")
-
- # must create empty file for service to start
- f = lookup_or_create_rulesfile(svc)
- f.content '# created by chef to allow service to start'
- f.run_action(:create)
-
- new_resource.updated_by_last_action(true) if f.updated_by_last_action?
- end
-
- iptables_service = lookup_or_create_service('netfilter-persistent')
- [:enable, :start].each do |act|
- # iptables-persistent isn't a real service
- iptables_service.status_command 'true'
-
- iptables_service.run_action(act)
- new_resource.updated_by_last_action(true) if iptables_service.updated_by_last_action?
- end
- end
-
- action :restart do
- return if disabled?(new_resource)
-
- # prints all the firewall rules
- log_iptables(new_resource)
-
- # ensure it's initialized
- new_resource.rules({}) unless new_resource.rules
- ensure_default_rules_exist(node, new_resource)
-
- # this populates the hash of rules from firewall_rule resources
- firewall_rules = Chef.run_context.resource_collection.select { |item| item.is_a?(Chef::Resource::FirewallRule) }
- firewall_rules.each do |firewall_rule|
- next unless firewall_rule.action.include?(:create) && !firewall_rule.should_skip?(:create)
-
- types = if ipv6_rule?(firewall_rule) # an ipv6 specific rule
- %w(ip6tables)
- elsif ipv4_rule?(firewall_rule) # an ipv4 specific rule
- %w(iptables)
- else # or not specific
- %w(iptables ip6tables)
- end
-
- types.each do |iptables_type|
- # build rules to apply with weight
- k = build_firewall_rule(node, firewall_rule, iptables_type == 'ip6tables')
- v = firewall_rule.position
-
- # unless we're adding them for the first time.... bail out.
- next if new_resource.rules[iptables_type].key?(k) && new_resource.rules[iptables_type][k] == v
- new_resource.rules[iptables_type][k] = v
- end
- end
-
- restart_service = false
-
- rule_files = %w(iptables)
- rule_files << 'ip6tables' if ipv6_enabled?(new_resource)
-
- rule_files.each do |iptables_type|
- iptables_filename = if iptables_type == 'ip6tables'
- '/etc/iptables/rules.v6'
- else
- '/etc/iptables/rules.v4'
- end
-
- # ensure a file resource exists with the current iptables rules
- begin
- iptables_file = Chef.run_context.resource_collection.find(file: iptables_filename)
- rescue
- iptables_file = file iptables_filename do
- action :nothing
- end
- end
- iptables_file.content build_rule_file(new_resource.rules[iptables_type])
- iptables_file.run_action(:create)
-
- # if the file was changed, restart iptables
- restart_service = true if iptables_file.updated_by_last_action?
- end
-
- if restart_service
- service_affected = service 'netfilter-persistent' do
- action :nothing
- end
- service_affected.run_action(:restart)
- new_resource.updated_by_last_action(true)
- end
- end
-
- action :disable do
- return if disabled?(new_resource)
-
- iptables_flush!(new_resource)
- iptables_default_allow!(new_resource)
- new_resource.updated_by_last_action(true)
-
- iptables_service = lookup_or_create_service('netfilter-persistent')
- [:disable, :stop].each do |act|
- iptables_service.run_action(act)
- new_resource.updated_by_last_action(true) if iptables_service.updated_by_last_action?
- end
-
- %w(rules.v4 rules.v6).each do |svc|
- # must create empty file for service to start
- f = lookup_or_create_rulesfile(svc)
- f.content '# created by chef to allow service to start'
- f.run_action(:create)
-
- new_resource.updated_by_last_action(true) if f.updated_by_last_action?
- end
- end
-
- action :flush do
- return if disabled?(new_resource)
-
- iptables_flush!(new_resource)
- new_resource.updated_by_last_action(true)
-
- rule_files = %w(rules.v4)
- rule_files << 'rules.v6' if ipv6_enabled?(new_resource)
- rule_files.each do |svc|
- # must create empty file for service to start
- f = lookup_or_create_rulesfile(svc)
- f.content '# created by chef to allow service to start'
- f.run_action(:create)
-
- new_resource.updated_by_last_action(true) if f.updated_by_last_action?
- end
- end
-
- def lookup_or_create_service(name)
- begin
- iptables_service = Chef.run_context.resource_collection.find(service: name)
- rescue
- iptables_service = service name do
- action :nothing
- end
- end
- iptables_service
- end
-
- def lookup_or_create_rulesfile(name)
- begin
- iptables_file = Chef.run_context.resource_collection.find(file: "/etc/iptables/#{name}")
- rescue
- iptables_file = file "/etc/iptables/#{name}" do
- action :nothing
- end
- end
- iptables_file
- end
- end
-end
diff --git a/lc-gdn-chef/cookbooks/firewall/libraries/provider_firewall_iptables_ubuntu1404.rb b/lc-gdn-chef/cookbooks/firewall/libraries/provider_firewall_iptables_ubuntu1404.rb
deleted file mode 100644
index b8b277a8b84468e6a5cf469a819fce0729088d26..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/firewall/libraries/provider_firewall_iptables_ubuntu1404.rb
+++ /dev/null
@@ -1,200 +0,0 @@
-#
-# Author:: Seth Chisamore ()
-# Cookbook:: firewall
-# Resource:: default
-#
-# Copyright:: 2011-2019, Chef Software, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-class Chef
- class Provider::FirewallIptablesUbuntu1404 < Chef::Provider::LWRPBase
- include FirewallCookbook::Helpers
- include FirewallCookbook::Helpers::Iptables
-
- provides :firewall, os: 'linux', platform_family: %w(debian) do |node|
- node['firewall'] && node['firewall']['ubuntu_iptables'] &&
- node['platform_version'].to_f <= (node['platform'] == 'ubuntu' ? 14.04 : 7)
- end
-
- def whyrun_supported?
- false
- end
-
- action :install do
- return if disabled?(new_resource)
-
- # Ensure the package is installed
- pkg = package 'iptables-persistent' do
- action :nothing
- end
- pkg.run_action(:install)
- new_resource.updated_by_last_action(true) if pkg.updated_by_last_action?
-
- rule_files = %w(rules.v4)
- rule_files << 'rules.v6' if ipv6_enabled?(new_resource)
- rule_files.each do |svc|
- next if ::File.exist?("/etc/iptables/#{svc}")
-
- # must create empty file for service to start
- f = lookup_or_create_rulesfile(svc)
- f.content '# created by chef to allow service to start'
- f.run_action(:create)
-
- new_resource.updated_by_last_action(true) if f.updated_by_last_action?
- end
-
- iptables_service = lookup_or_create_service('iptables-persistent')
- [:enable, :start].each do |act|
- # iptables-persistent isn't a real service
- iptables_service.status_command 'true'
-
- iptables_service.run_action(act)
- new_resource.updated_by_last_action(true) if iptables_service.updated_by_last_action?
- end
- end
-
- action :restart do
- return if disabled?(new_resource)
-
- # prints all the firewall rules
- log_iptables(new_resource)
-
- # ensure it's initialized
- new_resource.rules({}) unless new_resource.rules
- ensure_default_rules_exist(node, new_resource)
-
- # this populates the hash of rules from firewall_rule resources
- firewall_rules = Chef.run_context.resource_collection.select { |item| item.is_a?(Chef::Resource::FirewallRule) }
- firewall_rules.each do |firewall_rule|
- next unless firewall_rule.action.include?(:create) && !firewall_rule.should_skip?(:create)
-
- types = if ipv6_rule?(firewall_rule) # an ipv6 specific rule
- %w(ip6tables)
- elsif ipv4_rule?(firewall_rule) # an ipv4 specific rule
- %w(iptables)
- else # or not specific
- %w(iptables ip6tables)
- end
-
- types.each do |iptables_type|
- # build rules to apply with weight
- k = build_firewall_rule(node, firewall_rule, iptables_type == 'ip6tables')
- v = firewall_rule.position
-
- # unless we're adding them for the first time.... bail out.
- next if new_resource.rules[iptables_type].key?(k) && new_resource.rules[iptables_type][k] == v
- new_resource.rules[iptables_type][k] = v
- end
- end
-
- restart_service = false
-
- rule_files = %w(iptables)
- rule_files << 'ip6tables' if ipv6_enabled?(new_resource)
-
- rule_files.each do |iptables_type|
- iptables_filename = if iptables_type == 'ip6tables'
- '/etc/iptables/rules.v6'
- else
- '/etc/iptables/rules.v4'
- end
-
- # ensure a file resource exists with the current iptables rules
- begin
- iptables_file = Chef.run_context.resource_collection.find(file: iptables_filename)
- rescue
- iptables_file = file iptables_filename do
- action :nothing
- end
- end
- iptables_file.content build_rule_file(new_resource.rules[iptables_type])
- iptables_file.run_action(:create)
-
- # if the file was changed, restart iptables
- restart_service = true if iptables_file.updated_by_last_action?
- end
-
- if restart_service
- service_affected = service 'iptables-persistent' do
- action :nothing
- end
- service_affected.run_action(:restart)
- new_resource.updated_by_last_action(true)
- end
- end
-
- action :disable do
- return if disabled?(new_resource)
-
- iptables_flush!(new_resource)
- iptables_default_allow!(new_resource)
- new_resource.updated_by_last_action(true)
-
- iptables_service = lookup_or_create_service('iptables-persistent')
- [:disable, :stop].each do |act|
- iptables_service.run_action(act)
- new_resource.updated_by_last_action(true) if iptables_service.updated_by_last_action?
- end
-
- %w(rules.v4 rules.v6).each do |svc|
- # must create empty file for service to start
- f = lookup_or_create_rulesfile(svc)
- f.content '# created by chef to allow service to start'
- f.run_action(:create)
-
- new_resource.updated_by_last_action(true) if f.updated_by_last_action?
- end
- end
-
- action :flush do
- return if disabled?(new_resource)
-
- iptables_flush!(new_resource)
- new_resource.updated_by_last_action(true)
-
- rule_files = %w(rules.v4)
- rule_files << 'rules.v6' if ipv6_enabled?(new_resource)
- rule_files.each do |svc|
- # must create empty file for service to start
- f = lookup_or_create_rulesfile(svc)
- f.content '# created by chef to allow service to start'
- f.run_action(:create)
-
- new_resource.updated_by_last_action(true) if f.updated_by_last_action?
- end
- end
-
- def lookup_or_create_service(name)
- begin
- iptables_service = Chef.run_context.resource_collection.find(service: name)
- rescue
- iptables_service = service name do
- action :nothing
- end
- end
- iptables_service
- end
-
- def lookup_or_create_rulesfile(name)
- begin
- iptables_file = Chef.run_context.resource_collection.find(file: "/etc/iptables/#{name}")
- rescue
- iptables_file = file "/etc/iptables/#{name}" do
- action :nothing
- end
- end
- iptables_file
- end
- end
-end
diff --git a/lc-gdn-chef/cookbooks/firewall/libraries/provider_firewall_rule.rb b/lc-gdn-chef/cookbooks/firewall/libraries/provider_firewall_rule.rb
deleted file mode 100644
index 2352cbc049fbda7e958760d8485ad9ff143fbce9..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/firewall/libraries/provider_firewall_rule.rb
+++ /dev/null
@@ -1,34 +0,0 @@
-#
-# Author:: Ronald Doorn ()
-# Cookbook:: firewall
-# Provider:: rule_iptables
-#
-# Copyright:: 2015-2016, computerlyrik
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-class Chef
- class Provider::FirewallRuleGeneric < Chef::Provider::LWRPBase
- provides :firewall_rule
-
- action :create do
- return unless new_resource.notify_firewall
-
- firewall_resource = Chef.run_context.resource_collection.find(firewall: new_resource.firewall_name)
- raise 'could not find a firewall resource' unless firewall_resource
-
- new_resource.notifies(:restart, firewall_resource, :delayed)
- new_resource.updated_by_last_action(true)
- end
- end
-end
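
The :create action above does no firewall work itself; it only wires a delayed :restart notification to the named firewall resource, so all firewall_rule resources are gathered first and applied in one pass. An illustrative recipe fragment (placeholder names and values, only meaningful inside a Chef run):

```ruby
# Hypothetical recipe usage: both rules notify firewall[default] with a
# delayed :restart, which rebuilds and reapplies the full ruleset once.
firewall 'default' do
  action :install
end

firewall_rule 'ssh' do
  port    22
  command :allow
end

firewall_rule 'block-bad-net' do
  source   '203.0.113.0/24'
  command  :deny
  position 10 # lower weight, ordered before the default position of 50
end
```
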
diff --git a/lc-gdn-chef/cookbooks/firewall/libraries/provider_firewall_ufw.rb b/lc-gdn-chef/cookbooks/firewall/libraries/provider_firewall_ufw.rb
deleted file mode 100644
index ce059048491f4e3078dc78cb926a4a26aed502d1..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/firewall/libraries/provider_firewall_ufw.rb
+++ /dev/null
@@ -1,138 +0,0 @@
-#
-# Author:: Seth Chisamore ()
-# Cookbook:: firewall
-# Resource:: default
-#
-# Copyright:: 2011-2019, Chef Software, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-class Chef
- class Provider::FirewallUfw < Chef::Provider::LWRPBase
- include FirewallCookbook::Helpers::Ufw
-
- provides :firewall, os: 'linux', platform_family: %w(debian) do |node|
- !(node['firewall'] && node['firewall']['ubuntu_iptables'])
- end
-
- def whyrun_supported?
- false
- end
-
- action :install do
- return if disabled?(new_resource)
-
- pkg_ufw = package 'ufw' do
- action :nothing
- end
- pkg_ufw.run_action(:install)
- new_resource.updated_by_last_action(true) if pkg_ufw.updated_by_last_action?
-
- defaults_ufw = template '/etc/default/ufw' do
- action :nothing
- owner 'root'
- group 'root'
- mode '0644'
- source 'ufw/default.erb'
- cookbook 'firewall'
- end
- defaults_ufw.run_action(:create)
- new_resource.updated_by_last_action(true) if defaults_ufw.updated_by_last_action?
-
- return if ::File.exist?(ufw_rules_filename)
-
- ufw_file = lookup_or_create_rulesfile
- ufw_file.content '# created by chef to allow service to start'
- ufw_file.run_action(:create)
-
- new_resource.updated_by_last_action(true) if ufw_file.updated_by_last_action?
- end
-
- action :restart do
- return if disabled?(new_resource)
-
- # ensure it's initialized
- new_resource.rules({}) unless new_resource.rules
- new_resource.rules['ufw'] = {} unless new_resource.rules['ufw']
-
- # this populates the hash of rules from firewall_rule resources
- firewall_rules = Chef.run_context.resource_collection.select { |item| item.is_a?(Chef::Resource::FirewallRule) }
- firewall_rules.each do |firewall_rule|
- next unless firewall_rule.action.include?(:create) && !firewall_rule.should_skip?(:create)
-
- # build rules to apply with weight
- k = build_rule(firewall_rule)
- v = firewall_rule.position
-
- # unless we're adding them for the first time.... bail out.
- unless new_resource.rules['ufw'].key?(k) && new_resource.rules['ufw'][k] == v
- new_resource.rules['ufw'][k] = v
- end
- end
-
- # ensure a file resource exists with the current ufw rules
- ufw_file = lookup_or_create_rulesfile
- ufw_file.content build_rule_file(new_resource.rules['ufw'])
- ufw_file.run_action(:create)
-
- # if the file was changed, reset ufw and reapply the rules
- return unless ufw_file.updated_by_last_action?
- ufw_reset!
- ufw_logging!(new_resource.log_level) if new_resource.log_level
-
- new_resource.rules['ufw'].sort_by { |_k, v| v }.map { |k, _v| k }.each do |cmd|
- ufw_rule!(cmd)
- end
-
- # ensure it's enabled _after_ rules are inputted, to catch malformed rules
- ufw_enable! unless ufw_active?
- new_resource.updated_by_last_action(true)
- end
-
- action :disable do
- return if disabled?(new_resource)
-
- ufw_file = lookup_or_create_rulesfile
- ufw_file.content '# created by chef to allow service to start'
- ufw_file.run_action(:create)
- new_resource.updated_by_last_action(true) if ufw_file.updated_by_last_action?
-
- return unless ufw_active?
- ufw_disable!
- new_resource.updated_by_last_action(true)
- end
-
- action :flush do
- return if disabled?(new_resource)
-
- ufw_reset!
- new_resource.updated_by_last_action(true)
-
- ufw_file = lookup_or_create_rulesfile
- ufw_file.content '# created by chef to allow service to start'
- ufw_file.run_action(:create)
- new_resource.updated_by_last_action(true) if ufw_file.updated_by_last_action?
- end
-
- def lookup_or_create_rulesfile
- begin
- ufw_file = Chef.run_context.resource_collection.find(file: ufw_rules_filename)
- rescue
- ufw_file = file ufw_rules_filename do
- action :nothing
- end
- end
- ufw_file
- end
- end
-end
diff --git a/lc-gdn-chef/cookbooks/firewall/libraries/provider_firewall_windows.rb b/lc-gdn-chef/cookbooks/firewall/libraries/provider_firewall_windows.rb
deleted file mode 100644
index a886ec3a387164329adb01e9bcd63f7cf8ad0864..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/firewall/libraries/provider_firewall_windows.rb
+++ /dev/null
@@ -1,126 +0,0 @@
-#
-# Author:: Sander van Harmelen ()
-# Cookbook:: firewall
-# Provider:: windows
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-class Chef
- class Provider::FirewallWindows < Chef::Provider::LWRPBase
- include FirewallCookbook::Helpers::Windows
-
- provides :firewall, os: 'windows'
-
- def whyrun_supported?
- false
- end
-
- action :install do
- return if disabled?(new_resource)
-
- svc = service 'MpsSvc' do
- action :nothing
- end
-
- [:enable, :start].each do |act|
- svc.run_action(act)
- new_resource.updated_by_last_action(true) if svc.updated_by_last_action?
- end
- end
-
- action :restart do
- return if disabled?(new_resource)
-
- # ensure it's initialized
- new_resource.rules({}) unless new_resource.rules
- new_resource.rules['windows'] = {} unless new_resource.rules['windows']
-
- firewall_rules = Chef.run_context.resource_collection.select { |item| item.is_a?(Chef::Resource::FirewallRule) }
- firewall_rules.each do |firewall_rule|
- next unless firewall_rule.action.include?(:create) && !firewall_rule.should_skip?(:create)
-
- # build rules to apply with weight
- k = build_rule(firewall_rule)
- v = firewall_rule.position
-
- # unless we're adding them for the first time.... bail out.
- unless new_resource.rules['windows'].key?(k) && new_resource.rules['windows'][k] == v
- new_resource.rules['windows'][k] = v
- end
- end
-
- input_policy = node['firewall']['windows']['defaults']['policy']['input']
- output_policy = node['firewall']['windows']['defaults']['policy']['output']
- unless new_resource.rules['windows'].key?("set currentprofile firewallpolicy #{input_policy},#{output_policy}")
- # Make this the possible last rule in the list
- new_resource.rules['windows']["set currentprofile firewallpolicy #{input_policy},#{output_policy}"] = 99999
- end
-
- # ensure a file resource exists with the current rules
- begin
- windows_file = Chef.run_context.resource_collection.find(file: windows_rules_filename)
- rescue
- windows_file = file windows_rules_filename do
- action :nothing
- end
- end
- windows_file.content build_rule_file(new_resource.rules['windows'])
- windows_file.run_action(:create)
-
- # if the file was changed, rebuild the Windows Firewall rules
- return unless windows_file.updated_by_last_action?
-
- disable! if active?
- delete_all_rules! # clear entirely
- reset! # populate default rules
-
- new_resource.rules['windows'].sort_by { |_k, v| v }.map { |k, _v| k }.each do |cmd|
- add_rule!(cmd)
- end
- # ensure it's enabled _after_ rules are inputted, to catch malformed rules
- enable! unless active?
-
- new_resource.updated_by_last_action(true)
- end
-
- action :disable do
- return if disabled?(new_resource)
-
- if active?
- disable!
- Chef::Log.info("#{new_resource} disabled.")
- new_resource.updated_by_last_action(true)
- else
- Chef::Log.debug("#{new_resource} already disabled.")
- end
-
- svc = service 'MpsSvc' do
- action :nothing
- end
-
- [:disable, :stop].each do |act|
- svc.run_action(act)
- new_resource.updated_by_last_action(true) if svc.updated_by_last_action?
- end
- end
-
- action :flush do
- return if disabled?(new_resource)
-
- reset!
- Chef::Log.info("#{new_resource} reset.")
- new_resource.updated_by_last_action(true)
- end
- end
-end
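
The :restart action above accumulates rules in a hash keyed by command string with an integer weight, then applies them in ascending weight order. A minimal standalone Ruby sketch of that ordering step follows; the command fragments are hypothetical placeholders (the real strings come from build_rule), and puts stands in for the cookbook's add_rule! helper.

```ruby
# Ordering sketch for the weighted rule hash used by the :restart action.
# Command strings are hypothetical placeholders, not build_rule output.
rules = {
  'advfirewall firewall add rule name="ssh" dir=in protocol=TCP localport=22 action=allow' => 50,
  'set currentprofile firewallpolicy blockinbound,allowoutbound' => 99_999, # default policy sorts last
}

ordered = rules.sort_by { |_cmd, weight| weight }.map { |cmd, _weight| cmd }
ordered.each { |cmd| puts cmd } # the provider calls add_rule!(cmd) at this point
```
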
diff --git a/lc-gdn-chef/cookbooks/firewall/libraries/resource_firewall.rb b/lc-gdn-chef/cookbooks/firewall/libraries/resource_firewall.rb
deleted file mode 100644
index 3920f8ff7770798ff58f0665fb2c9304d226726b..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/firewall/libraries/resource_firewall.rb
+++ /dev/null
@@ -1,26 +0,0 @@
-class Chef
- class Resource::Firewall < Chef::Resource::LWRPBase
- resource_name(:firewall)
- provides(:firewall)
- actions(:install, :restart, :disable, :flush, :save)
- default_action(:install)
-
- # allow both kinds of logic -- eventually remove the :disabled one.
- # the positive logic is much easier to follow.
- attribute(:disabled, kind_of: [TrueClass, FalseClass], default: false)
- attribute(:enabled, kind_of: [TrueClass, FalseClass], default: true)
-
- attribute(:log_level, kind_of: Symbol, equal_to: [:low, :medium, :high, :full, :off], default: :low)
- attribute(:rules, kind_of: Hash)
-
-    # for firewalld, specify the zone when the firewall is disabled and enabled
- attribute(:disabled_zone, kind_of: Symbol, default: :public)
- attribute(:enabled_zone, kind_of: Symbol, default: :drop)
-
- # for firewall implementations where ipv6 can be skipped (currently iptables-specific)
- attribute(:ipv6_enabled, kind_of: [TrueClass, FalseClass], default: true)
-
- # allow override of package options for firewalld package
- attribute(:package_options, kind_of: String, default: nil)
- end
-end
diff --git a/lc-gdn-chef/cookbooks/firewall/libraries/resource_firewall_rule.rb b/lc-gdn-chef/cookbooks/firewall/libraries/resource_firewall_rule.rb
deleted file mode 100644
index 045a295ac4e4801937ddb8a8beeb4ba9bab27d42..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/firewall/libraries/resource_firewall_rule.rb
+++ /dev/null
@@ -1,51 +0,0 @@
-require 'ipaddr'
-
-class Chef
- class Resource::FirewallRule < Chef::Resource::LWRPBase
- include FirewallCookbook::Helpers
-
- resource_name(:firewall_rule)
- provides(:firewall_rule)
- default_action(:create)
-
- attribute(:firewall_name, kind_of: String, default: 'default')
-
- attribute(:command, kind_of: Symbol, equal_to: [:reject, :allow, :deny, :masquerade, :redirect, :log], default: :allow)
-
- attribute(:protocol, kind_of: [Integer, Symbol], default: :tcp,
- callbacks: { 'must be either :tcp, :udp, :icmp, :\'ipv6-icmp\', :icmpv6, :none, or a valid IP protocol number' => lambda do |p|
- !!(p.to_s =~ /(udp|tcp|icmp|icmpv6|ipv6-icmp|esp|ah|ipv6|none)/ || (p.to_s =~ /^\d+$/ && p.between?(0, 142)))
- end })
- attribute(:direction, kind_of: Symbol, equal_to: [:in, :out, :pre, :post], default: :in)
- attribute(:logging, kind_of: Symbol, equal_to: [:connections, :packets])
-
- attribute(:source, kind_of: String, callbacks: { 'must be a valid ip address' => ->(ip) { !!IPAddr.new(ip) } })
- attribute(:source_port, kind_of: [Integer, Array, Range]) # source port
- attribute(:interface, kind_of: String)
-
- attribute(:port, kind_of: [Integer, Array, Range]) # shorthand for dest_port
- attribute(:destination, kind_of: String, callbacks: { 'must be a valid ip address' => ->(ip) { !!IPAddr.new(ip) } })
- attribute(:dest_port, kind_of: [Integer, Array, Range])
- attribute(:dest_interface, kind_of: String)
-
- attribute(:position, kind_of: Integer, default: 50)
- attribute(:stateful, kind_of: [Symbol, Array])
- attribute(:redirect_port, kind_of: Integer)
- attribute(:description, kind_of: String, name_attribute: true)
- attribute(:include_comment, kind_of: [TrueClass, FalseClass], default: true)
-
- # only used for firewalld
- attribute(:permanent, kind_of: [TrueClass, FalseClass], default: false)
-
- # only used for Windows Firewalls
- attribute(:program, kind_of: String)
- attribute(:service, kind_of: String)
-
- # for when you just want to pass a raw rule
- attribute(:raw, kind_of: String)
-
- # do you want this rule to notify the firewall to recalculate
- # (and potentially reapply) the firewall_rule(s) it finds?
- attribute(:notify_firewall, kind_of: [TrueClass, FalseClass], default: true)
- end
-end
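
The attributes above are exercised by the default recipe later in this diff; as an additional illustration, a short recipe-style sketch (the port list, source CIDR and position are example values, not cookbook defaults):

```ruby
# Illustrative usage of the firewall/firewall_rule resources; values are examples only.
firewall 'default' do
  action :install
end

firewall_rule 'allow web traffic' do
  port     [80, 443]      # Array shorthand for dest_port
  protocol :tcp
  source   '10.0.0.0/8'   # example CIDR, validated via IPAddr
  position 40             # lower positions are applied earlier
  command  :allow
end
```
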
diff --git a/lc-gdn-chef/cookbooks/firewall/metadata.json b/lc-gdn-chef/cookbooks/firewall/metadata.json
deleted file mode 100644
index c87865d3e73eca4d341220569e7f4fb0c417416d..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/firewall/metadata.json
+++ /dev/null
@@ -1,35 +0,0 @@
-{
- "name": "firewall",
- "description": "Provides a set of primitives for managing firewalls and associated rules.",
- "long_description": "",
- "maintainer": "Sous Chefs",
- "maintainer_email": "help@sous-chefs.org",
- "license": "Apache-2.0",
- "platforms": {
- "amazon": ">= 0.0.0",
- "centos": ">= 0.0.0",
- "debian": ">= 0.0.0",
- "ubuntu": ">= 0.0.0",
- "windows": ">= 0.0.0"
- },
- "dependencies": {
- },
- "providing": {
- },
- "recipes": {
- },
- "version": "6.0.0",
- "source_url": "https://github.com/sous-chefs/firewall",
- "issues_url": "https://github.com/sous-chefs/firewall/issues",
- "privacy": false,
- "chef_versions": [
- [
- ">= 15.5"
- ]
- ],
- "ohai_versions": [
- ],
- "gems": [
- ],
- "eager_load_libraries": true
-}
diff --git a/lc-gdn-chef/cookbooks/firewall/metadata.rb b/lc-gdn-chef/cookbooks/firewall/metadata.rb
deleted file mode 100644
index 23aa3803651685de1c62e27a15114ffe4e4b0d65..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/firewall/metadata.rb
+++ /dev/null
@@ -1,15 +0,0 @@
-name 'firewall'
-maintainer 'Sous Chefs'
-maintainer_email 'help@sous-chefs.org'
-license 'Apache-2.0'
-description 'Provides a set of primitives for managing firewalls and associated rules.'
-version '6.0.0'
-source_url 'https://github.com/sous-chefs/firewall'
-issues_url 'https://github.com/sous-chefs/firewall/issues'
-chef_version '>= 15.5'
-
-supports 'amazon'
-supports 'centos'
-supports 'debian'
-supports 'ubuntu'
-supports 'windows'
diff --git a/lc-gdn-chef/cookbooks/firewall/recipes/default.rb b/lc-gdn-chef/cookbooks/firewall/recipes/default.rb
deleted file mode 100644
index f99279462e9f0227521b8ec15a000c3afc1d6956..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/firewall/recipes/default.rb
+++ /dev/null
@@ -1,76 +0,0 @@
-#
-# Cookbook:: firewall
-# Recipe:: default
-#
-# Copyright:: 2011-2019, Chef Software, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-firewall 'default' do
- ipv6_enabled node['firewall']['ipv6_enabled']
- action :install
-end
-
-# create a variable to use as a condition on some rules that follow
-iptables_firewall = rhel? || amazon_linux? || node['firewall']['ubuntu_iptables']
-
-firewall_rule 'allow loopback' do
- interface 'lo'
- protocol :none
- command :allow
- only_if { linux? && node['firewall']['allow_loopback'] }
-end
-
-firewall_rule 'allow icmp' do
- protocol :icmp
- command :allow
- # debian ufw doesn't allow 'icmp' protocol, but does open
- # icmp by default, so we skip it in default recipe
- only_if { iptables_firewall && node['firewall']['allow_icmp'] }
-end
-
-firewall_rule 'allow world to ssh' do
- port 22
- source '0.0.0.0/0'
- only_if { linux? && node['firewall']['allow_ssh'] }
-end
-
-firewall_rule 'allow world to winrm' do
- port 5989
- source '0.0.0.0/0'
- only_if { windows? && node['firewall']['allow_winrm'] }
-end
-
-firewall_rule 'allow world to mosh' do
- protocol :udp
- port 60000..61000
- source '0.0.0.0/0'
- only_if { linux? && node['firewall']['allow_mosh'] }
-end
-
-# allow established connections, ufw defaults to this but iptables does not
-firewall_rule 'established' do
- stateful [:related, :established]
- protocol :none # explicitly don't specify protocol
- command :allow
- only_if { node['firewall']['allow_established'] && iptables_firewall }
-end
-
-# ipv6 needs ICMPv6 to work reliably, so allow it when ipv6 is enabled
-# (gated on the same allow_established / iptables conditions as the rule above)
-firewall_rule 'ipv6_icmp' do
- protocol :'ipv6-icmp'
- command :allow
- only_if { node['firewall']['ipv6_enabled'] && node['firewall']['allow_established'] && iptables_firewall }
-end
diff --git a/lc-gdn-chef/cookbooks/firewall/recipes/disable_firewall.rb b/lc-gdn-chef/cookbooks/firewall/recipes/disable_firewall.rb
deleted file mode 100644
index a9cc12714525b1f14be02272d55d94e5c487f3c7..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/firewall/recipes/disable_firewall.rb
+++ /dev/null
@@ -1,23 +0,0 @@
-#
-# Cookbook:: firewall
-# Recipe:: disable_firewall
-#
-# Copyright:: 2011-2019, Chef Software, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# Disable platform default firewall
-firewall 'default' do
- action :disable
-end
diff --git a/lc-gdn-chef/cookbooks/firewall/resources/firewalld.rb b/lc-gdn-chef/cookbooks/firewall/resources/firewalld.rb
deleted file mode 100644
index c805ea799b9d8d7687d7b67ec9c2f4260c38cc15..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/firewall/resources/firewalld.rb
+++ /dev/null
@@ -1,28 +0,0 @@
-unified_mode true
-
-provides :firewalld,
- os: 'linux'
-
-action :install do
- chef_gem 'ruby-dbus'
- require 'dbus'
- package 'firewalld'
-end
-
-action :reload do
- service 'firewalld' do
- action :reload
- end
-end
-
-action :restart do
- service 'firewalld' do
- action :restart
- end
-end
-
-action :disable do
- service 'firewalld' do
- action [:disable, :stop]
- end
-end
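
A minimal sketch of how the firewalld resource above could be driven from a recipe; the zone file name and path are assumptions made for the example, not part of the cookbook.

```ruby
# Install firewalld (also installs the ruby-dbus gem used by the other resources).
firewalld 'default' do
  action :install
end

# Reload firewalld after dropping in a zone definition; the file path is illustrative.
cookbook_file '/etc/firewalld/zones/example.xml' do
  source 'example.xml'
  notifies :reload, 'firewalld[default]', :delayed
end
```
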
diff --git a/lc-gdn-chef/cookbooks/firewall/resources/firewalld_config.rb b/lc-gdn-chef/cookbooks/firewall/resources/firewalld_config.rb
deleted file mode 100644
index 4860a42c4abf5eacf0ab13a89106f321dd9c676e..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/firewall/resources/firewalld_config.rb
+++ /dev/null
@@ -1,39 +0,0 @@
-unified_mode true
-
-provides :firewalld_config,
- os: 'linux'
-
-property :default_zone,
- String,
-         description: 'Set the default zone for connections and interfaces where no zone has been selected. Changing the default zone changes the zone for all connections and interfaces that are using it.'
-property :log_denied,
- String,
- equal_to: %w(all unicast broadcast multicast off),
- description: 'Set LogDenied value to value. If LogDenied is enabled, then logging rules are added right before reject and drop rules in the INPUT, FORWARD and OUTPUT chains for the default rules and also final reject and drop rules in zones.'
-
-load_current_value do |_new_resource|
- sysbus = DBus.system_bus
- firewalld_service = sysbus['org.fedoraproject.FirewallD1']
- firewalld_object = firewalld_service['/org/fedoraproject/FirewallD1']
- interface = firewalld_object['org.fedoraproject.FirewallD1']
-
- default_zone interface.getDefaultZone
- log_denied interface.getLogDenied
-end
-
-action :update do
- dbus = DBus.system_bus
- fw = firewalld_interface(dbus)
-
- converge_if_changed :default_zone do
- fw.setDefaultZone new_resource.default_zone
- end
-
- converge_if_changed :log_denied do
- fw.setLogDenied new_resource.log_denied
- end
-end
-
-action_class do
- include FirewallCookbook::Helpers::FirewalldDBus
-end
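
An illustrative use of firewalld_config with the two properties defined above; the chosen zone and LogDenied value are examples.

```ruby
# Converge the global defaults exposed by the FirewallD1 D-Bus interface.
firewalld_config 'global settings' do
  default_zone 'public'   # example zone
  log_denied   'unicast'  # one of: all, unicast, broadcast, multicast, off
  action       :update
end
```
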
diff --git a/lc-gdn-chef/cookbooks/firewall/resources/firewalld_helpers.rb b/lc-gdn-chef/cookbooks/firewall/resources/firewalld_helpers.rb
deleted file mode 100644
index 44fdeb90d277ddc9b506f75cd4c23c489ab702f5..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/firewall/resources/firewalld_helpers.rb
+++ /dev/null
@@ -1,106 +0,0 @@
-unified_mode true
-
-provides :firewalld_helper,
- os: 'linux'
-
-property :version,
- String,
- default: '',
- description: 'see version attribute of helper tag in firewalld.helper(5).'
-property :short,
- String,
- name_property: true,
- description: 'see short tag in firewalld.helper(5).'
-property :description,
- String,
- description: 'see description tag in firewalld.helper(5).'
-property :family,
- String,
- equal_to: %w(ipv4 ipv6),
- default: 'ipv4',
- description: 'see family tag in firewalld.helper(5).'
-property :nf_module,
- String,
- description: 'see module tag in firewalld.helper(5).'
-property :ports,
- [Array, String],
- default: [],
- description: 'array of port and protocol pairs. See port tag in firewalld.helper(5).',
- coerce: proc { |o| Array(o) }
-
-load_current_value do |new_resource|
- dbus = DBus.system_bus
- firewalld_service = dbus['org.fedoraproject.FirewallD1']
- firewalld_object = firewalld_service['/org/fedoraproject/FirewallD1/config']
- fw_config = firewalld_object['org.fedoraproject.FirewallD1.config']
- if fw_config.getHelperNames.include?(new_resource.short)
- helper_path = fw_config.getHelperByName(new_resource.short)
- object = firewalld_service[helper_path]
- config_helper = object['org.fedoraproject.FirewallD1.config.helper']
- settings = config_helper.getSettings
- version settings[0]
- # short settings[1]
- description settings[2]
- family settings[3]
- nf_module settings[4]
- ports settings[5]
- else
- Chef::Log.info "Helper #{new_resource.short} does not exist. Will be created."
- end
-end
-
-action :update do
- dbus = DBus.system_bus
- fw = firewalld_interface(dbus)
- fw_config = config_interface(dbus)
- helper_names = fw_config.getHelperNames
- reload = false
- if !helper_names.include?(new_resource.short)
- values = [
- new_resource.version,
- new_resource.short,
- default_description(new_resource),
- new_resource.family,
- new_resource.nf_module,
- new_resource.ports.map { |e| e.split('/') },
- ]
- converge_by "Add Helper #{new_resource.short}" do
- fw_config.addHelper(new_resource.short, values)
- end
- reload = true
- else
- helper_path = fw_config.getHelperByName(new_resource.short)
- helper = helper_interface(dbus, helper_path)
- converge_if_changed :version do
- helper.setVersion new_resource.version
- reload = true
- end
- converge_if_changed :description do
- helper.setDescription default_description(new_resource)
- reload = true
- end
- converge_if_changed :family do
- helper.setFamily new_resource.family
- reload = true
- end
- converge_if_changed :nf_module do
- helper.setModule new_resource.nf_module
- reload = true
- end
- converge_if_changed :ports do
- helper.setPorts new_resource.ports.map { |e| e.split('/') }
- reload = true
- end
- end
-
- if reload
- converge_by ['reload permanent configuration of firewalld'] do
- fw.reload
- end
- end
-end
-
-action_class do
- include FirewallCookbook::Helpers
- include FirewallCookbook::Helpers::FirewalldDBus
-end
diff --git a/lc-gdn-chef/cookbooks/firewall/resources/firewalld_icmptype.rb b/lc-gdn-chef/cookbooks/firewall/resources/firewalld_icmptype.rb
deleted file mode 100644
index 73f83dd528b4d2dc687e20f379ef8f65a97f38ec..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/firewall/resources/firewalld_icmptype.rb
+++ /dev/null
@@ -1,88 +0,0 @@
-unified_mode true
-
-provides :firewalld_icmptype,
- os: 'linux'
-
-property :version,
- String,
- default: '',
- description: 'see version attribute of icmptype tag in firewalld.icmptype(5).'
-property :short,
- String,
- name_property: true,
- description: 'see short tag in firewalld.icmptype(5).'
-property :description,
- String,
- description: 'see description tag in firewalld.icmptype(5).'
-property :destinations,
- Array,
- equal_to: [['ipv4'], ['ipv6'], %w(ipv4 ipv6)],
- default: 'ipv4',
- description: 'array, either empty or containing strings \'ipv4\' and/or \'ipv6\', see destination tag in firewalld.icmptype(5).',
- coerce: proc { |o| Array(o) }
-
-load_current_value do |new_resource|
- sysbus = DBus.system_bus
- firewalld_service = sysbus['org.fedoraproject.FirewallD1']
- firewalld_object = firewalld_service['/org/fedoraproject/FirewallD1/config']
- fw_config = firewalld_object['org.fedoraproject.FirewallD1.config']
- if fw_config.getIcmpTypeNames.include?(new_resource.short)
- icmptype_path = fw_config.getIcmpTypeByName(new_resource.short)
- object = firewalld_service[icmptype_path]
- config_icmptype = object['org.fedoraproject.FirewallD1.config.icmptype']
- settings = config_icmptype.getSettings
- version settings[0]
- # short settings[1]
- description settings[2]
- destinations settings[3]
- else
- Chef::Log.info "IcmpType #{new_resource.short} does not exist. Will be created."
- end
-end
-
-action :update do
- dbus = DBus.system_bus
- fw_config = config_interface(dbus)
- fw = firewalld_interface(dbus)
- reload = false
- icmptype_names = fw_config.getIcmpTypeNames
- if !icmptype_names.include?(new_resource.short)
- values = [
- new_resource.version,
- new_resource.short,
- default_description(new_resource),
- new_resource.destinations,
- ]
-
- converge_by "Add IcmpType #{new_resource.short}" do
- fw_config.addIcmpType(new_resource.short, values)
- end
- reload = true
- else
- icmptype_path = fw_config.getIcmpTypeByName(new_resource.short)
- icmptype = icmptype_interface(dbus, icmptype_path)
- converge_if_changed :version do
- icmptype.setVersion new_resource.version
- reload = true
- end
- converge_if_changed :description do
- icmptype.setDescription default_description(new_resource)
- reload = true
- end
- converge_if_changed :destinations do
- icmptype.setDestinations new_resource.destinations
- reload = true
- end
- end
-
- if reload
- converge_by ['reload permanent configuration of firewalld'] do
- fw.reload
- end
- end
-end
-
-action_class do
- include FirewallCookbook::Helpers
- include FirewallCookbook::Helpers::FirewalldDBus
-end
diff --git a/lc-gdn-chef/cookbooks/firewall/resources/firewalld_ipset.rb b/lc-gdn-chef/cookbooks/firewall/resources/firewalld_ipset.rb
deleted file mode 100644
index afea6268d3410e163988ce436ccef9b931110b69..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/firewall/resources/firewalld_ipset.rb
+++ /dev/null
@@ -1,104 +0,0 @@
-unified_mode true
-
-provides :firewalld_ipset,
- os: 'linux'
-
-property :version,
- String,
- description: 'see version attribute of ipset tag in firewalld.ipset(5).'
-property :short,
- String,
- name_property: true,
- description: 'see short tag in firewalld.ipset(5).'
-property :description,
- String,
- description: 'see description tag in firewalld.ipset(5).'
-property :type,
- String,
- default: 'hash:ip',
- description: 'see type attribute of ipset tag in firewalld.ipset(5).',
- equal_to:
- %w(hash:ip hash:ip,mark hash:ip,port hash:ip,port,ip hash:ip,port,net hash:mac hash:net hash:net,iface hash:net,net hash:net,port hash:net,port,net)
-property :options,
- Hash,
-         description: 'hash of {option: value}. See options tag in firewalld.ipset(5).'
-property :entries,
- [Array, String],
- description: 'array of entries, see entry tag in firewalld.ipset(5).',
- coerce: proc { |o| Array(o) }
-
-load_current_value do |new_resource|
- sysbus = DBus.system_bus
- firewalld_service = sysbus['org.fedoraproject.FirewallD1']
- firewalld_object = firewalld_service['/org/fedoraproject/FirewallD1/config']
- fw_config = firewalld_object['org.fedoraproject.FirewallD1.config']
- if fw_config.getIPSetNames.include?(new_resource.short)
- ipset_path = fw_config.getIPSetByName(new_resource.short)
- object = firewalld_service[ipset_path]
- config_ipset = object['org.fedoraproject.FirewallD1.config.ipset']
- settings = config_ipset.getSettings
- version settings[0]
- # short settings[1]
- description settings[2]
- type settings[3]
- options settings[4]
- entries settings[5]
- else
- Chef::Log.info "Ipset #{new_resource.short} does not exist. Will be created."
- end
-end
-
-action :update do
- dbus = DBus.system_bus
- fw = firewalld_interface(dbus)
- fw_config = config_interface(dbus)
- reload = false
- if !fw_config.getIPSetNames.include?(new_resource.short)
- values = [
- new_resource.version || '',
- new_resource.short,
- default_description(new_resource),
- new_resource.type,
- new_resource.options || {},
- new_resource.entries,
- ]
- converge_by "Add ipset #{new_resource.short}" do
- fw_config.addIPSet(new_resource.short, values)
- end
- reload = true
- else
- ipset_path = fw_config.getIPSetByName(new_resource.short)
- ipset = ipset_interface(dbus, ipset_path)
- converge_if_changed :version do
- ipset.setVersion new_resource.version
- reload = true
- end
- converge_if_changed :description do
-      ipset.setDescription default_description(new_resource)
- reload = true
- end
- converge_if_changed :type do
- ipset.setType new_resource.type
- reload = true
- end
- converge_if_changed :options do
- ipset.setOptions(new_resource.options || {})
- reload = true
- end
- converge_if_changed :entries do
- ipset.setEntries new_resource.entries
- reload = true
- end
- end
-
- if reload
- converge_by ['reload permanent configuration of firewalld'] do
- fw.reload
- end
- end
-end
-
-action_class do
- include FirewallCookbook::Helpers
- include FirewallCookbook::Helpers::FirewalldDBus
-end
diff --git a/lc-gdn-chef/cookbooks/firewall/resources/firewalld_policy.rb b/lc-gdn-chef/cookbooks/firewall/resources/firewalld_policy.rb
deleted file mode 100644
index 985d29c74d19049e8ed2e6eb3836a1b2d410393e..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/firewall/resources/firewalld_policy.rb
+++ /dev/null
@@ -1,115 +0,0 @@
-unified_mode true
-
-provides :firewalld_policy,
- os: 'linux'
-
-property :description,
- String,
- description: 'see description tag in firewalld.policy(5).'
-property :egress_zones,
- [Array, String],
- description: 'array of zone names. See egress-zone tag in firewalld.policy(5).',
- coerce: proc { |o| Array(o) }
-property :forward_ports,
- [Array, String],
- description: 'array of `portid[-portid]:proto=protocol[:toport=portid[-portid]][:toaddr=address[/mask]]`. See forward-port tag in firewalld.policy(5).',
- coerce: proc { |o| Array(o) }
-property :icmp_blocks,
- [Array, String],
- description: 'array of icmp-blocks. See icmp-block tag in firewalld.policy(5).'
-property :ingress_zones,
- [Array, String],
- description: 'array of zone names. See ingress-zone tag in firewalld.policy(5).',
- coerce: proc { |o| Array(o) }
-property :masquerade,
- [true, false],
- description: 'see masquerade tag in firewalld.policy(5).'
-property :ports,
- [Array, String],
- description: 'array of port and protocol pairs. See port tag in firewalld.policy(5).',
- coerce: proc { |o| Array(o) }
-property :priority,
- Integer,
- description: 'see priority tag in firewalld.policy(5).'
-property :protocols,
- [Array, String],
- description: 'array of protocols, see protocol tag in firewalld.policy(5).',
- coerce: proc { |o| Array(o) }
-property :rich_rules,
- [Array, String],
- description: 'array of rich-language rules. See rule tag in firewalld.policy(5).',
- coerce: proc { |o| Array(o) }
-property :services,
- [Array, String],
- description: 'array of service names, see service tag in firewalld.policy(5).',
- coerce: proc { |o| Array(o) }
-property :short,
- String,
- description: 'see short tag in firewalld.policy(5).',
- name_property: true
-property :source_ports,
- [Array, String],
- description: 'array of port and protocol pairs. See source-port tag in firewalld.policy(5).',
- coerce: proc { |o| Array(o) }
-property :target,
- String,
- description: 'see target attribute of policy tag in firewalld.policy(5).'
-property :version,
- String,
- description: 'see version attribute of policy tag in firewalld.policy(5).'
-
-load_current_value do |new_resource|
- sysbus = DBus.system_bus
- firewalld_service = sysbus['org.fedoraproject.FirewallD1']
- firewalld_object = firewalld_service['/org/fedoraproject/FirewallD1/config']
- fw_config = firewalld_object['org.fedoraproject.FirewallD1.config']
- if fw_config.getPolicyNames.include?(new_resource.short)
- policy_path = fw_config.getPolicyByName(new_resource.short)
- object = firewalld_service[policy_path]
- config_policy = object['org.fedoraproject.FirewallD1.config.policy']
- config_policy.getSettings.each do |k, v|
- send(k, v)
- end
- else
- Chef::Log.info "Zone #{new_resource.short} does not exist. Will be created."
- end
-end
-
-action :update do
- dbus = DBus.system_bus
- fw = firewalld_interface(dbus)
- fw_config = config_interface(dbus)
- reload = false
-
- unless fw_config.getPolicyNames.include?(new_resource.short)
- fw_config.addPolicy(new_resource.short, {})
- end
- policy_path = fw_config.getPolicyByName(new_resource.short)
- policy = policy_interface(dbus, policy_path)
- properties = new_resource.class.state_properties.map(&:name)
- properties.each do |property|
- new_value = new_resource.send(property)
- next if new_value.nil?
- if [:ports, :source_ports].include?(property)
- new_value = DBus.variant('a(ss)', new_value.map { |e| e.split('/') })
- elsif [:forward_ports].include?(property)
- new_value = forward_ports_to_dbus(new_resource)
- elsif [:priority].include?(property)
- new_value = DBus.variant('i', new_value)
- end
- converge_if_changed property do
- policy.update({ property.to_s => new_value })
- reload = true
- end
- end
-
- if reload
- converge_by ['reload permanent configuration of firewalld'] do
- fw.reload
- end
- end
-end
-
-action_class do
- include FirewallCookbook::Helpers::FirewalldDBus
-end
diff --git a/lc-gdn-chef/cookbooks/firewall/resources/firewalld_service.rb b/lc-gdn-chef/cookbooks/firewall/resources/firewalld_service.rb
deleted file mode 100644
index 447912590097eef94f8cf9b859a1b39012505f8d..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/firewall/resources/firewalld_service.rb
+++ /dev/null
@@ -1,98 +0,0 @@
-unified_mode true
-
-provides :firewalld_service,
- os: 'linux'
-
-property :version,
- String,
- description: 'see version attribute of service tag in firewalld.service(5).'
-property :short,
- String,
- name_property: true,
- description: 'see short tag in firewalld.service(5).'
-property :description,
- String,
- description: 'see description tag in firewalld.service(5).'
-property :ports,
- [Array, String],
- description: 'array of port and protocol pairs. See port tag in firewalld.service(5).',
- coerce: proc { |o| Array(o) }
-property :module_names,
- [Array, String],
- description: 'array of kernel netfilter helpers, see module tag in firewalld.service(5).',
- coerce: proc { |o| Array(o) }
-property :destination,
- Hash,
- description: 'hash of {IP family : IP address} where \'IP family\' key can be either \'ipv4\' or \'ipv6\'. See destination tag in firewalld.service(5).'
-property :protocols,
- [Array, String],
- description: 'array of protocols, see protocol tag in firewalld.service(5).',
- coerce: proc { |o| Array(o) }
-property :source_ports,
- [Array, String],
- description: 'array of port and protocol pairs. See source-port tag in firewalld.service(5).',
- coerce: proc { |o| Array(o) }
-property :includes,
- [Array, String],
- description: 'array of service includes, see include tag in firewalld.service(5).',
- coerce: proc { |o| Array(o) }
-property :helpers,
- [Array, String],
- description: 'array of service helpers, see helper tag in firewalld.service(5).',
- coerce: proc { |o| Array(o) }
-
-load_current_value do |new_resource|
- sysbus = DBus.system_bus
- firewalld_service = sysbus['org.fedoraproject.FirewallD1']
- firewalld_object = firewalld_service['/org/fedoraproject/FirewallD1/config']
- fw_config = firewalld_object['org.fedoraproject.FirewallD1.config']
- if fw_config.getServiceNames.include?(new_resource.short)
- service_path = fw_config.getServiceByName(new_resource.short)
- object = firewalld_service[service_path]
- config_service = object['org.fedoraproject.FirewallD1.config.service']
- config_service.getSettings2.each do |k, v|
- send(k, v)
- end
- else
- Chef::Log.info "Service #{new_resource.short} does not exist. Will be created."
- end
-end
-
-action :update do
- dbus = DBus.system_bus
- fw = firewalld_interface(dbus)
- fw_config = config_interface(dbus)
- reload = false
- unless fw_config.getServiceNames.include?(new_resource.short)
- fw_config.addService2(new_resource.short, {})
- end
-
- service_path = fw_config.getServiceByName(new_resource.short)
- service = service_interface(dbus, service_path)
- properties = new_resource.class.state_properties.map(&:name)
- properties.each do |property|
- new_value = new_resource.send(property)
- next unless new_value
- if [:ports, :source_ports].include?(property)
- new_value = DBus.variant('a(ss)', new_value.map { |e| e.split('/') })
- elsif property == :description
- new_value = default_description(new_resource)
- end
- converge_if_changed property do
- key = property == :short ? 'name' : property.to_s
- service.update2({ key => new_value })
- reload = true
- end
- end
-
- if reload
- converge_by ['reload permanent configuration of firewalld'] do
- fw.reload
- end
- end
-end
-
-action_class do
- include FirewallCookbook::Helpers
- include FirewallCookbook::Helpers::FirewalldDBus
-end
diff --git a/lc-gdn-chef/cookbooks/firewall/resources/firewalld_zone.rb b/lc-gdn-chef/cookbooks/firewall/resources/firewalld_zone.rb
deleted file mode 100644
index d4841721e60580f19f563755da68286b8c9e2e38..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/firewall/resources/firewalld_zone.rb
+++ /dev/null
@@ -1,118 +0,0 @@
-unified_mode true
-
-provides :firewalld_zone,
- os: 'linux'
-
-property :description,
- String,
- description: 'see description tag in firewalld.zone(5).'
-property :forward,
- [true, false],
- description: 'see forward tag in firewalld.zone(5).'
-property :forward_ports,
- [Array, String],
- description: 'array of (port, protocol, to-port, to-addr). See forward-port tag in firewalld.zone(5).',
- coerce: proc { |o| Array(o) }
-property :icmp_block_inversion,
- [true, false],
- description: 'see icmp-block-inversion tag in firewalld.zone(5).'
-property :icmp_blocks,
- [Array, String],
- description: 'array of icmp-blocks. See icmp-block tag in firewalld.zone(5).',
- coerce: proc { |o| Array(o) }
-property :interfaces,
- [Array, String],
- description: 'array of interfaces. See interface tag in firewalld.zone(5).',
- coerce: proc { |o| Array(o) }
-property :masquerade,
- [true, false],
- description: 'see masquerade tag in firewalld.zone(5).'
-property :ports,
- [Array, String],
- description: 'array of port and protocol pairs. See port tag in firewalld.zone(5).',
- coerce: proc { |o| Array(o) }
-property :protocols,
- [Array, String],
- description: 'array of protocols, see protocol tag in firewalld.zone(5).',
- coerce: proc { |o| Array(o) }
-property :rules_str,
- [Array, String],
- description: 'array of rich-language rules. See rule tag in firewalld.zone(5).',
- coerce: proc { |o| Array(o) }
-property :services,
- [Array, String],
- description: 'array of service names, see service tag in firewalld.zone(5).',
- coerce: proc { |o| Array(o) }
-property :short,
- String,
- name_property: true,
- description: 'see short tag in firewalld.zone(5).'
-property :source_ports,
- [Array, String],
- description: 'array of port and protocol pairs. See source-port tag in firewalld.zone(5).',
- coerce: proc { |o| Array(o) }
-property :sources,
- [Array, String],
- description: 'array of source addresses. See source tag in firewalld.zone(5).',
- coerce: proc { |o| Array(o) }
-property :target,
- String,
- description: 'see target attribute of zone tag in firewalld.zone(5).'
-property :version,
- String,
- description: 'see version attribute of zone tag in firewalld.zone(5).'
-
-load_current_value do |new_resource|
- sysbus = DBus.system_bus
- firewalld_service = sysbus['org.fedoraproject.FirewallD1']
- firewalld_object = firewalld_service['/org/fedoraproject/FirewallD1/config']
- fw_config = firewalld_object['org.fedoraproject.FirewallD1.config']
- if fw_config.getZoneNames.include?(new_resource.short)
- zone_path = fw_config.getZoneByName(new_resource.short)
- object = firewalld_service[zone_path]
- config_zone = object['org.fedoraproject.FirewallD1.config.zone']
- config_zone.getSettings2.each do |k, v|
- send(k, v)
- end
- else
- Chef::Log.info "Zone #{new_resource.short} does not exist. Will be created."
- end
-end
-
-action :update do
- dbus = DBus.system_bus
- fw = firewalld_interface(dbus)
- fw_config = config_interface(dbus)
-
- unless fw_config.getZoneNames.include?(new_resource.short)
- fw_config.addZone2(new_resource.short, {})
- end
- zone_path = fw_config.getZoneByName(new_resource.short)
- zone = zone_interface(dbus, zone_path)
-
- reload = false
- properties = new_resource.class.state_properties.map(&:name)
- properties.each do |property|
- new_value = new_resource.send(property)
- next unless new_value
- if [:ports, :source_ports].include?(property)
- new_value = DBus.variant('a(ss)', new_value.map { |e| e.split('/') })
- elsif [:forward_ports].include?(property)
- new_value = forward_ports_to_dbus(new_resource)
- end
- converge_if_changed property do
- zone.update2({ property.to_s => new_value })
- reload = true
- end
- end
-
- if reload
- converge_by ['reload permanent configuration of firewalld'] do
- fw.reload
- end
- end
-end
-
-action_class do
- include FirewallCookbook::Helpers::FirewalldDBus
-end
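
An illustrative firewalld_zone declaration exercising a few of the properties above; the zone name, services, ports and sources are example values.

```ruby
# Declare a zone and let the :update action converge it over D-Bus.
firewalld_zone 'internal_web' do
  description 'Example zone managed by Chef'
  services    %w(http https)
  ports       %w(8080/tcp)          # "port/protocol" pairs, split on '/' before the D-Bus call
  sources     %w(192.168.10.0/24)   # example source network
  action      :update
end
```
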
diff --git a/lc-gdn-chef/cookbooks/firewall/resources/nftables.rb b/lc-gdn-chef/cookbooks/firewall/resources/nftables.rb
deleted file mode 100644
index c6e1c4406e536c14a4df6ef1f104de00825c2c92..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/firewall/resources/nftables.rb
+++ /dev/null
@@ -1,70 +0,0 @@
-unified_mode true
-
-action_class do
- include FirewallCookbook::Helpers
- include FirewallCookbook::Helpers::Nftables
-end
-
-provides :nftables,
- os: 'linux'
-
-property :rules,
- Hash,
- default: {}
-property :input_policy,
- String,
- equal_to: %w(drop accept),
- default: 'accept'
-property :output_policy,
- String,
- equal_to: %w(drop accept),
- default: 'accept'
-property :forward_policy,
- String,
- equal_to: %w(drop accept),
- default: 'accept'
-property :table_ip_nat,
- [true, false],
- default: false
-property :table_ip6_nat,
- [true, false],
- default: false
-
-action :install do
- package 'nftables' do
- action :install
- notifies :rebuild, "nftables[#{new_resource.name}]"
- end
-end
-
-action :rebuild do
- ensure_default_rules_exist(new_resource)
-
- file '/etc/nftables.conf' do
- content <<~NFT
- #!/usr/sbin/nft -f
- flush ruleset
- #{build_rule_file(new_resource.rules)}
- NFT
- mode '0750'
- owner 'root'
- group 'root'
- notifies :restart, 'service[nftables]'
- end
-
- service 'nftables' do
- action [:enable, :start]
- end
-end
-
-action :restart do
- service 'nftables' do
- action :restart
- end
-end
-
-action :disable do
- service 'nftables' do
- action [:disable, :stop]
- end
-end
diff --git a/lc-gdn-chef/cookbooks/firewall/resources/nftables_rule.rb b/lc-gdn-chef/cookbooks/firewall/resources/nftables_rule.rb
deleted file mode 100644
index 89c12a847e73899a9e7988b0672a8ffe2ede42ba..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/firewall/resources/nftables_rule.rb
+++ /dev/null
@@ -1,113 +0,0 @@
-unified_mode true
-
-require 'ipaddr'
-
-action_class do
- include FirewallCookbook::Helpers
- include FirewallCookbook::Helpers::Nftables
-
- def return_early?(new_resource)
- !new_resource.notify_firewall ||
- !(new_resource.action.include?(:create) &&
- !new_resource.should_skip?(:create))
- end
-end
-
-provides :nftables_rule
-default_action :create
-
-property :firewall_name,
- String,
- default: 'default'
-property :command,
- [Array, Symbol],
- default: :accept
-property :protocol,
- [Integer, Symbol],
- default: :tcp,
- callbacks: {
- 'must be either :tcp, :udp, :icmp, :\'ipv6-icmp\', :icmpv6, :none, or a valid IP protocol number' => lambda do |p|
- %i(udp tcp icmp icmpv6 ipv6-icmp esp ah ipv6 none).include?(p) || (0..142).include?(p)
- end,
- }
-property :direction,
- Symbol,
- equal_to: [:in, :out, :pre, :post, :forward],
- default: :in
-# nftables handles ip6 and ip simultaneously. Except for directions
-# :pre and :post, where either :ip6 or :ip must be specified.
-# A callback should prevent mixing these up.
-property :family,
- Symbol,
- equal_to: [:ip6, :ip],
- default: :ip
-property :source,
- [String, Array],
- callbacks: {
- 'must be a valid ip address' => lambda do |ips|
- Array(ips).inject(false) do |a, ip|
- a || !!IPAddr.new(ip)
- end
- end,
- }
-property :sport,
- [Integer, String, Array, Range]
-property :interface,
- String
-
-property :dport,
- [Integer, String, Array, Range]
-property :destination,
- [String, Array],
- callbacks: {
- 'must be a valid ip address' => lambda do |ips|
- Array(ips).inject(false) do |a, ip|
- a || !!IPAddr.new(ip)
- end
- end,
- }
-property :outerface,
- String
-
-property :position,
- Integer,
- default: 50
-property :stateful,
- [Symbol, Array]
-property :redirect_port,
- Integer
-property :description,
- String,
- name_property: true
-property :include_comment,
- [true, false],
- default: true
-property :log_prefix,
- String
-property :log_group,
- Integer
-# for when you just want to pass a raw rule
-property :raw,
- String
-
-# do you want this rule to notify the firewall to recalculate
-# (and potentially reapply) the firewall_rule(s) it finds?
-property :notify_firewall,
- [true, false],
- default: true
-
-action :create do
- return if return_early?(new_resource)
- fwr = build_firewall_rule(new_resource)
-
- with_run_context :root do
- edit_resource!('nftables', new_resource.firewall_name) do |fw_rule|
- r = rules.dup || {}
- r.merge!({
- fwr => fw_rule.position,
- })
- rules(r)
- delayed_action :rebuild
- end
- end
-end
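
An illustrative recipe combining the two nftables resources above; the rule is merged into the root-context nftables resource and applied by its delayed :rebuild action. Policy, port and source values are examples.

```ruby
# Install nftables with a default-deny input policy.
nftables 'default' do
  input_policy 'drop'
  action       :install
end

# Accept SSH from an example management network; firewall_name defaults to 'default'.
nftables_rule 'ssh from management network' do
  dport    22
  protocol :tcp
  source   '203.0.113.0/24'   # example (TEST-NET-3) range
  command  :accept
end
```
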
diff --git a/lc-gdn-chef/cookbooks/firewall/templates/default/ufw/default.erb b/lc-gdn-chef/cookbooks/firewall/templates/default/ufw/default.erb
deleted file mode 100644
index 5e6e9c2be681a57c508d6a5656b2d117909e71c5..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/firewall/templates/default/ufw/default.erb
+++ /dev/null
@@ -1,13 +0,0 @@
-# /etc/default/ufw
-# This file is managed by Chef. Do not edit.
-
-IPV6=<%= node['firewall']['ufw']['defaults']['ipv6'] %>
-MANAGE_BUILTINS=<%= node['firewall']['ufw']['defaults']['manage_builtins'] %>
-
-<% node['firewall']['ufw']['defaults']['policy'].each do |policy, value| -%>
-<%= "DEFAULT_#{policy.upcase}_POLICY=\"#{value}\"" %>
-<% end -%>
-
-IPT_SYSCTL="<%= node['firewall']['ufw']['defaults']['ipt_sysctl'] %>"
-
-IPT_MODULES="<%= node['firewall']['ufw']['defaults']['ipt_modules'] %>"
diff --git a/lc-gdn-chef/cookbooks/haproxy/.markdownlint.json b/lc-gdn-chef/cookbooks/haproxy/.markdownlint.json
deleted file mode 100644
index 95b5094c1871744165cc6bafec1e3bb8f542ebb9..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/haproxy/.markdownlint.json
+++ /dev/null
@@ -1,6 +0,0 @@
-{
- "MD024": {
- "siblings_only": true
- },
- "MD013":false
-}
diff --git a/lc-gdn-chef/cookbooks/haproxy/CHANGELOG.md b/lc-gdn-chef/cookbooks/haproxy/CHANGELOG.md
deleted file mode 100644
index 68c407f14b6f4931d0efb416a99a2bf67e0a16a0..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/haproxy/CHANGELOG.md
+++ /dev/null
@@ -1,769 +0,0 @@
-# haproxy Cookbook CHANGELOG
-
-This file is used to list changes made in each version of the haproxy cookbook.
-
-## 12.2.3 - *2022-04-21*
-
-- Remove delivery folder
-- Standardise files with files in sous-chefs/repo-management
-- Migrate to new workflow pipelines
-
-## 12.2.2 - *2021-10-05*
-
-- Update supported platforms in README.md
-
-## 12.2.1 - *2021-08-30*
-
-- Standardise files with files in sous-chefs/repo-management
-
-## 12.2.0 - *2021-08-11*
-
-- Add `use_promex` property to install.rb to support compiling with Prometheus Exporter support - [@Wicaeed](https://github.com/wicaeed)
-
-## 12.1.0 - *2021-06-14*
-
-- Add `ssl_lib` and `ssl_inc` properties to `haproxy_install` to support openssl - [@derekgroh](https://github.com/derekgroh)
-
-## 12.0.1 - *2021-06-01*
-
-- Standardise files with files in sous-chefs/repo-management
-
-## 12.0.0 - *2021-05-13*
-
-- Refactor to use resource partials
-- Add delete action to most resources
-- Convert `install` resource boolean strings to true/false
-- Ensure section is created before adding an ACL
-
-## 11.0.0 - *2021-05-07*
-
-- Drop testing for Debian 8, Ubuntu 16.04 & Ubuntu 18.04
-- Add testing for Debian 9, Ubuntu 20.04 & Ubuntu 21.04
-- Fix the minimum Chef version to 15.3 (unified_mode was introduced in 15.3)
-- Change kitchen to use the Bento provided Amazonlinux2 image
-- Fix test suite
-
-## 10.0.1 - *2021-04-26*
-
-- Add missing configuration file properties to all resources
-
-## 10.0.0 - *2021-04-24*
-
-- Add configuration test function to the service resource - [@bmhughes](https://github.com/bmhughes)
-- Fix generating multiple actions from the service resource - [@bmhughes](https://github.com/bmhughes)
-- Kitchen test with CentOS 8/8 stream - [@bmhughes](https://github.com/bmhughes)
-- Fix IUS repo causing a run failure on an unsupported platform - [@bmhughes](https://github.com/bmhughes)
-- Move configuration resource creation into resource helper module - [@bmhughes](https://github.com/bmhughes)
-
-## [v9.1.0] (2020-10-07)
-
-### Added
-
-- testing for haproxy 2.2
-
-### Removed
-
-- testing for haproxy 1.9 & 2.1
-
-## [v9.0.1] (2020-09-15)
-
-### Added
-
-- added lua compilation flags to `haproxy_install` resource
-
-### Fixed
-
-- resolved cookstyle error: libraries/helpers.rb:19:24 refactor: `ChefCorrectness/InvalidPlatformFamilyInCase`
-- Updated IUS repo url to `https://repo.ius.io/ius-release-el7.rpm`
-
-### Changed
-
-- Turn on unified_mode for all resources
-
-## [v9.0.0] (2020-02-21)
-
-### Changed
-
-- Removed `depends_on` build-essential, as this is now in Chef Core
-
-### Fixed
-
-- Cookstyle fixes for cookstyle version 5.20
-
-## [v8.3.0] (2020-01-09)
-
-### Added
-
-- on `haproxy_install` epel is now a configurable option
-
-### Changed
-
-- Migrated testing to github actions
-
-### Fixed
-
-- ius repo will only echo out if enabled
-
-## [v8.2.0] (2019-12-23)
-
-### Added
-
-- `fastcgi` resource to support FastCGI applications
-
-### Changed
-
-- Default source install version is haproxy 2.1
-
-### Fixed
-
-- Bug with single newline between resources when multiple of the same type are defined
-
-### Removed
-
-- `.foodcritic` as it is no longer run by deliver local.
-- `.rubocop.yml` as no longer required.
-
-## [v8.1.1] (2019-10-02)
-
-### Changed
-
-- Updated `config_defaults` resource `stats` property default value to an empty hash.
-- Updated metadata.rb chef_version to >=13.9 due to resource `description`.
-
-## [v8.1.0] (2019-06-24)
-
-### Changed
-
-- Updated build target to linux-glibc for haproxy 2.0 compatibility.
-- Updated integration tests to cover haproxy 2.0.
-- Moved install resource target_os check to libraries.
-
-## [v8.0.0] (2019-05-29)
-
-### Added
-
-- The bind config hash joins with a space instead of a colon.
-- The peer resource.
-- The mailer resource.
-
-## [v7.1.0] (2019-04-16)
-
-### Changed
-
-- Clean up unused templates and files.
-
-### Fixed
-
-- Name conflict with systemd_unit in service resource.
-
-## [v7.0.0] (2019-04-10)
-
-### Added
-
-- `health` to allowed values for `mode` on `frontend`, `backend`, `listen`, `default`.
-- `apt-update` for debian platforms.
-- ius repo for CentOS and Redhat package installations (resolves #348).
-
-### Changed
-
-- Clean up unit and integration test content regular expressions.
-- Move system command to a helper.
-- Support only systemd init systems.
-
-### Removed
-
-- Remove `poise_service` dependency in favor of systemd_unit.
-
-### Fixed
-
-- Fix cookbook default value in `config_global`.
-
-## [v6.4.0] (2019-03-20)
-
-### Changed
-
-- Move resource documentation to dedicated folder with md per resource.
-- Rename haproxy_cache `name` property as `cache_name`.
-
-### Fixed
-
-- Source installs on CentOS 6.
-
-## [v6.3.0] (2019-02-18)
-
-### Added
-
-- Haproxy_cache resource for caching small objects with HAProxy version >=1.8.
-
-### Changed
-
-- Expand integration test coverage to all stable and LTS HAProxy versions.
-- Documentation - clarify extra_options hash string => array option.
-- Clarify the supported platforms - add AmazonLinux 2, remove fedora & freebsd.
-
-## [v6.2.7] (2019-01-10)
-
-### Added
-
-- Test for appropriate spacing from start of line and end of line.
-- `hash_type` param to `haproxy_backend`, `haproxy_listen`, and `haproxy_config_defaults` resources.
-- `reqirep` and `reqrep` params to `haproxy_backend`, `haproxy_frontend`, and `haproxy_listen` resources.
-- `sensitive` param to `haproxy_install`; set to false to show diff output during Chef run.
-
-### Changed
-
-- Allow passing an array to `haproxy_listen`'s `http_request` param.
-
-### Fixed
-
-- Fix ordering for `haproxy_listen`: `acl` directives should be applied before `http-request`.
-
-## [v6.2.6] (2018-11-05)
-
-### Changed
-
-- Put `http_request` rules before the `use_backend`.
-
-## [v6.2.5] (2018-10-09)
-
-### Added
-
-- rspec examples for resource usage.
-
-### Removed
-
-- Chef-12 support.
-- CPU cookbook dependency.
-
-### Fixed
-
-- Systemd wrapper, the wrapper is no longer included with haproxy versions greater than 1.8.
-
-## [v6.2.4] (2018-09-19)
-
-### Added
-
-- Server property to listen resource and config template.
-
-## [v6.2.3] (2018-08-03)
-
-### Removed
-
-- A few resource default values so they can be specified in the haproxy.cfg defaults section; also added a service reload example to the README for config changes.
-
-## [v6.2.2] (2018-08-03)
-
-### Changed
-
-- Made `haproxy_install` `source_url` property dynamic with `source_version` property and removed the need to specify checksum #307.
-
-## [v6.2.1] (2018-08-01)
-
-### Added
-
-- Compiling from source crypt support #305.
-
-## [v6.2.0] (2018-05-11)
-
-### Changed
-
-- Require Chef 12.20 or later.
-- Uses the build_essential resource not the default recipe so the cookbook can be skipped entirely if running on Chef 14+.
-
-## [v6.1.0] (2018-04-12)
-
-### **Breaking changes**
-
-### Added
-
-- `haproxy_service` resource see test suites for usage.
-- Support for haproxy 1.8.
-- Test haproxy version 1.8.7 and 1.7.8.
-- Test on chef-client version 13.87 and 14.
-- Notes on how we generate the travis.yml list.
-
-### Changed
-
-- Require Chef 12.20 or later.
-- Uses the build_essential resource not the default recipe so the cookbook can be skipped entirely if running on Chef 14+.
-- Simplify the kitchen matrix.
-- Use default action in tests (:create).
-- Set the use_systemd property from the init package system.
-- Adding in systemd for SUSE Linux.
-
-### Removed
-
-- `kitchen.dokken.yml` suites and inherit from kitchen.yml.
-- Amazon tests until a new dokken image is produced that is reliable.
-
-### Fixed
-
-- Source comparison.
-
-## [v6.0.0] (2018-03-28)
-
-### Removed
-
-- `compat_resource` cookbook dependency and pushed the required Chef version to 12.20
-
-## [v5.0.4] (2018-03-28)
-
-### Changed
-
-- Make 1.8.4 the default installed version (#279)
-- Use dokken docker images
-- Update tests for haproxy service
-- tcplog is now a valid input for the `haproxy_config_defaults` resource (#284)
-- bin prefix is now reflected in the service config. (#288, #289)
-
-## [v5.0.3] (2018-02-02)
-
-### Fixed
-
-- `foodcritic` warning for not defining `name_property`.
-
-## [v5.0.2] (2017-11-29)
-
-### Fixed
-
-- Typo in the listen section that made expressions in http-request, http-response and `default_backend` unprintable; they now print correctly.
-
-## [v5.0.1] (2017-08-10)
-
-### Removed
-
-- useless blank space in generated config file haproxy.cfg
-
-## [v5.0.0] (2017-08-07)
-
-### Added
-
-- Option for install only #251.
-
-### Changed
-
-- Updating service to use cookbook template.
-- updating to haproxy 1.7.8, updating `source_version` in test files(kitchen,cookbook, etc)
-- updating properties to use `new_resource`
-
-### Fixed
-
-- `log` `property` in `global` resource can now be of type `Array` or `String`. This fixes #252
-- fixing supports line #258
-
-## [v4.6.1] (2017-08-02)
-
-### Changed
-
-- Reload instead of restart on config change
-- Specify -sf argument last to support haproxy < 1.6.0
-
-## [v4.6.0] (2017-07-13)
-
-### Added
-
-- `conf_template_source`
-- `conf_cookbook`
-- Support Array value for `extra_options` entries. (#245, #246)
-
-## [v4.5.0] (2017-06-29)
-
-### Added
-
-- `resolver` resource (#240)
-
-## [v4.4.0] (2017-06-28)
-
-### Added
-
-- `option` as an Array `property` for `backend` resource. This fixes #234
-- Synced Debian/Ubuntu init script with latest upstream package changes
-
-## [v4.3.1] (2017-06-13)
-
-### Added
-
-- Oracle Linux 6 support
-
-### Removed
-
-- Scientific linux support as we don't have a reliable image
-
-## [v4.3.0] (2017-05-31)
-
-### Added
-
-- Chefspec Matchers for the resources defined in this cookbook.
-- `mode` property to `backend` and `frontend` resources.
-- `maxconn` to `global` resource.
-
-### Removed
-
-- `default_backend` as a required property on the `frontend` resource.
-
-## [v4.2.0] (2017-05-04)
-
-### Added
-
-- In `acl` resource, usage: `test/fixtures/cookbooks/test/recipes/config_acl.rb`
-- In `use_backend` resource, usage: `test/fixtures/cookbooks/test/recipes/config_acl.rb`
-- `acl` and `use_backend` to `listen` resource.
-- Amazon Linux as a supported platform.
-
-### Changed
-
-- Pinned `build-essential`, `>= 8.0.1`
-- Pinned `poise-service`, `>= 1.5.1`
-- Cleaned up arrays in `templates/default/haproxy.cfg.erb`
-
-### Fixed
-
-- Init script for Amazon Linux.
-
-### BREAKING CHANGES
-
-- This version removes `stats_socket`, `stats_uri` and `stats_timeout` properties from the `haproxy_global` and `haproxy_listen` resources in favour of using a hash to pass configuration options.
-
-## [v4.1.0] (2017-05-01)
-
-### Added
-
-- `userlist` resource, to see usage: `test/fixtures/cookbooks/test/recipes/config_1_userlist.rb`
-- chef-search example in: `test/fixtures/cookbooks/test/recipes/config_backend_search.rb`
-- Multiple addresses and ports on listener and frontend (#205)
-
-### Changed
-
-- Updating source install test to take node attributes as haproxy.org is slow.
-
-### Fixed
-
-- `haproxy_retries` in `haproxy_config_defaults` resource
-
-## [v4.0.2] (2017-04-21)
-
-### Fixed
-
-- haproxy service start on Ubuntu 14.04 (#199)
-- Reload HAProxy when changing configuration (#197)
-
-## [v4.0.1] (2017-04-20)
-
-### Added
-
-- Updating README.md
-- Adding compat_resource for chef-12 support
-- Improvement when rendering the configuration file (#196)
-
-## [v4.0.0] (2017-04-18)
-
-### COMPATIBILITY WARNING
-
-- This version removes the existing recipes, attributes, and instance provider in favor of the new haproxy_install and haproxy_ configuration resources. Why not just leave them in place? Well unfortunately they were utterly broken for anything other than the most trivial usage. Rather than continue the user pain we've opted to remove them and point users to a more modern installation method. If you need the legacy installation methods simply pin to the 3.0.4 release.
-- THIS IS GOING TO BREAK EVERYTHING YOU KNOW AND LOVE
-- 12.5 or greater rewrite
-- Custom Resource Only, no recipes
-
-## [v3.0.4] (2017-03-29)
-
-### Fixed
-
-- Bug introduced in (#174) (#182)
-
-## [v3.0.3] (2017-03-28)
-
-### Added
-
-- Multiple addresses and ports on listener and frontend (#174)
-- Customize logging destination (#178)
-
-### Changed
-
-- Updating to use bats/serverspec (#179)
-
-## [v3.0.2] (2017-03-27)
-
-### Added
-
-- Allow server startup from `app_lb` recipe. (#171)
-- Use Delivery instead of Rake
-- Make this cookbook compatible with Chef-13, note: `params` option is now `parameters` (#175)
-
-## [v3.0.1] (2017-01-30)
-
-### Added
-
-- Reload haproxy configuration on changes (#152)
-- Merging in generic socket conf (#107)
-- Updating config to use facilities hash dynamically (#102)
-- Adding `tproxy` and splice per (#98)
-
-### Removed
-
-- Members with nil ips from member array. (#79)
-
-## [v3.0.0] (2017-01-24)
-
-### Added
-
-- Configurable debug options
-- CentOS7 compatibility (#123)
-- Adding poise-service for service management
-
-### Changed
-
-- Updating source install to use Haproxy 1.7.2
-- Chef >= 12.1 required
-- Use `['haproxy']['source']['target_cpu']` instead of `['haproxy']['source']['target_os']` to detect correct architecture. (#150)
-
-## [v2.0.2] (2016-12-30)
-
-### Fixed
-
-- Cookstyle
-- The github URL for the repo in various locations
-
-### Changed
-
-- Travis testing updates
-- Converted file modes to strings
-- Updated the config resource to lazily evaluate node attribute values to better load the values when overridden in wrapper cookbooks
-
-## v2.0.1 (2016-12-08)
-
-### Fixed
-
-- Dynamic configuration to properly template out frontend and backend sections
-
-### Changed
-
-- Update Chef Brigade to Sous Chefs
-- Updated contributing docs to remove the reference to the develop branch
-
-## v2.0.0 (2016-11-09)
-
-### Breaking Changes
-
-- The default recipe is now an empty recipe with manual configuration performed in the 'manual' recipe
-- Remove Chef 10 compatibility code
-- Switch from Librarian to Berksfile
-- Updated the source recipe to install 1.6.9 by default
-
-### Added
-
-- Migrated this cookbook from Heavy Water to Chef Brigade so we can ensure more frequent releases and maintenance
-- A code of conduct for the project. Read it.
-- Several new syslog configuration attributes
-- A new attribute for stats_socket_level
-- A new attribute for retries
-- A chefignore file to speed up syncs from the server
-- Scientific and oracle as supported platforms in the metadata
-- source_url, issues_url, and chef_version metadata
-- Enabled why-run support in the default haproxy resource
-- New haproxy_config resource
-- Guardfile
-- Testing in Travis CI with a Rakefile that runs cookstyle, foodcritic, and ChefSpec as well as a Kitchen Dokken config that does integration testing of the package install
-- New node['haproxy']['pool_members'] and node['haproxy']['pool_members_option'] attributes
-
-### Changed
-
-- The haproxy config is now verified before the service restarts / reloads to prevent taking down haproxy with a bad config
-- Update the Kitchen config file to use Bento boxes and new platforms
-- Update ChefSpec matchers to use the latest format
-- Broke search logic out into a new_discovery recipe
-
-### Removed
-
-- Attributes from the metadata file as these are redundant
-- Broken tarball validation in the source recipe to prevented installs from completing
-
-### Fixed
-
-- Source installs not running if an older version was present on the node
-- Resolved all cookstyle and foodcritic warnings
-
-## v1.6.7
-
-### Added
-
-- ChefSpec matchers and test coverage
-
-### Changed
-
-- Replaced references to Opscode with Chef
-
-## v1.6.6
-
-### Changed
-
-- Parameterize options for admin listener.
-- Renamed templates/rhel to templates/redhat.
-- Sort pool members by hostname to avoid needless restarts.
-- Support amazon linux init script.
-- Support to configure global options.
-
-### Fixed
-
-- CPU Tuning, corrects cpu_affinity resource triggers
-
-## v1.6.4
-
-## v1.6.2
-
-### Added
-
-- [COOK-3135](https://tickets.chef.io/browse/COOK-3135) - Allow setting of members with default recipe without changing the template.
-
-### Fixed
-
-- [COOK-3424](https://tickets.chef.io/browse/COOK-3424) - Haproxy cookbook attempts to alter an immutable attribute.
-
-## v1.6.0
-
-### Added
-
-- Allow setting of members with default recipe without changing the template.
-
-## v1.5.0
-
-### Added
-
-- [COOK-3660](https://tickets.chef.io/browse/COOK-3660) - Make haproxy socket default user group configurable
-- [COOK-3537](https://tickets.chef.io/browse/COOK-3537) - Add OpenSSL and zlib source configurations
-- [COOK-2384](https://tickets.chef.io/browse/COOK-2384) - Add LWRP for multiple haproxy sites/configs
-
-## v1.4.0
-
-### Added
-
-- [COOK-3237](https://tickets.chef.io/browse/COOK-3237) - Enable cookie-based persistence in a backend
-- [COOK-3216](https://tickets.chef.io/browse/COOK-3216) - Metadata attributes
-- [COOK-3211](https://tickets.chef.io/browse/COOK-3211) - Support RHEL
-- [COOK-3133](https://tickets.chef.io/browse/COOK-3133) - Allow configuration of a global stats socket
-
-## v1.3.2
-
-### Fixed
-
-- [COOK-3046]: haproxy default recipe broken by COOK-2656.
-
-### Added
-
-- [COOK-2009]: Test-kitchen support to haproxy.
-
-## v1.3.0
-
-### Changed
-
-- [COOK-2656]: Unify the haproxy.cfg with that from `app_lb`.
-
-### Added
-
-- [COOK-1488]: Provide an option to build haproxy from source.
-
-## v1.2.0
-
-### Added
-
-- [COOK-1936] - use frontend / backend logic.
-- [COOK-1937] - cleanup for configurations.
-- [COOK-1938] - more flexibility for options.
-- [COOK-1939] - reloading haproxy is better than restarting.
-- [COOK-1940] - haproxy stats listen on 0.0.0.0 by default.
-- [COOK-1944] - improve haproxy performance.
-
-## v1.1.4
-
-### Added
-
-- [COOK-1839] - `httpchk` configuration to `app_lb` template.
-
-## v1.1.0
-
-### Changed
-
-- [COOK-1275] - haproxy-default.erb should be a cookbook_file.
-
-### Fixed
-
-- [COOK-1594] - Template-Service ordering issue in `app_lb` recipe.
-
-## v1.0.6
-
-### Changed
-
-- [COOK-1310] - Redispatch flag has changed.
-
-## v1.0.4
-
-### Changed
-
-- [COOK-806] - Load balancer should include an SSL option.
-- [COOK-805] - Fundamental haproxy load balancer options should be configurable.
-
-## v1.0.3
-
-### Changed
-
-- [COOK-620] `haproxy::app_lb`'s template should use the member cloud private IP by default.
-
-## v1.0.2
-
-### Fixed
-
-- Regression introduced in v1.0.1.
-
-## v1.0.1
-
-### Added
-
-- Account for the case where load balancer is in the pool.
-
-## v1.0.0
-
-### Changed
-
-- Use `node.chef_environment` instead of `node['app_environment']`.
-
-[10.0.0 - *2021-04-24*]: https://github.com/sous-chefs/haproxy/compare/v6.4.0...HEAD
-[v3.0.0]: https://github.com/sous-chefs/haproxy/compare/v2.0.2...v3.0.0
-[v3.0.1]: https://github.com/sous-chefs/haproxy/compare/v3.0.0...v3.0.1
-[v3.0.2]: https://github.com/sous-chefs/haproxy/compare/v3.0.1...v3.0.2
-[v3.0.3]: https://github.com/sous-chefs/haproxy/compare/v3.0.2...v3.0.3
-[v3.0.4]: https://github.com/sous-chefs/haproxy/compare/v3.0.3...v3.0.4
-[v4.0.0]: https://github.com/sous-chefs/haproxy/compare/v3.0.4...v4.0.0
-[v4.0.1]: https://github.com/sous-chefs/haproxy/compare/v4.0.0...v4.0.1
-[v4.0.2]: https://github.com/sous-chefs/haproxy/compare/v4.0.1...v4.0.2
-[v4.1.0]: https://github.com/sous-chefs/haproxy/compare/v4.0.2...v4.1.0
-[v4.2.0]: https://github.com/sous-chefs/haproxy/compare/v4.1.0...v4.2.0
-[v4.3.0]: https://github.com/sous-chefs/haproxy/compare/v4.2.0...v4.3.0
-[v4.3.1]: https://github.com/sous-chefs/haproxy/compare/v4.3.0...v4.3.1
-[v4.4.0]: https://github.com/sous-chefs/haproxy/compare/v4.3.1...v4.4.0
-[v4.5.0]: https://github.com/sous-chefs/haproxy/compare/v4.4.0...v4.5.0
-[v4.6.0]: https://github.com/sous-chefs/haproxy/compare/v4.5.0...v4.6.0
-[v4.6.1]: https://github.com/sous-chefs/haproxy/compare/v4.6.0...v4.6.1
-[v5.0.0]: https://github.com/sous-chefs/haproxy/compare/v4.6.1...v5.0.0
-[v5.0.1]: https://github.com/sous-chefs/haproxy/compare/v5.0.0...v5.0.1
-[v5.0.2]: https://github.com/sous-chefs/haproxy/compare/v5.0.1...v5.0.2
-[v5.0.3]: https://github.com/sous-chefs/haproxy/compare/v5.0.2...v5.0.3
-[v5.0.4]: https://github.com/sous-chefs/haproxy/compare/v5.0.3...v5.0.4
-[v6.0.0]: https://github.com/sous-chefs/haproxy/compare/v5.0.4...v6.0.0
-[v6.1.0]: https://github.com/sous-chefs/haproxy/compare/v6.0.0...v6.1.0
-[v6.2.0]: https://github.com/sous-chefs/haproxy/compare/v6.1.0...v6.2.0
-[v6.2.1]: https://github.com/sous-chefs/haproxy/compare/v6.2.0...v6.2.1
-[v6.2.2]: https://github.com/sous-chefs/haproxy/compare/v6.2.1...v6.2.2
-[v6.2.3]: https://github.com/sous-chefs/haproxy/compare/v6.2.2...v6.2.3
-[v6.2.4]: https://github.com/sous-chefs/haproxy/compare/v6.2.3...v6.2.4
-[v6.2.5]: https://github.com/sous-chefs/haproxy/compare/v6.2.4...v6.2.5
-[v6.2.6]: https://github.com/sous-chefs/haproxy/compare/v6.2.5...v6.2.6
-[v6.2.7]: https://github.com/sous-chefs/haproxy/compare/v6.2.6...v6.2.7
-[v6.3.0]: https://github.com/sous-chefs/haproxy/compare/v6.2.7...v6.3.0
-[v6.4.0]: https://github.com/sous-chefs/haproxy/compare/v6.3.0...v6.4.0
-[v7.0.0]: https://github.com/sous-chefs/haproxy/compare/v6.4.0...v7.0.0
-[v7.1.0]: https://github.com/sous-chefs/haproxy/compare/v7.0.0...v7.1.0
-[v8.0.0]: https://github.com/sous-chefs/haproxy/compare/v7.1.0...v8.0.0
-[v8.1.0]: https://github.com/sous-chefs/haproxy/compare/v8.0.0...v8.1.0
-[v8.1.1]: https://github.com/sous-chefs/haproxy/compare/v8.1.0...v8.1.1
-[v8.2.0]: https://github.com/sous-chefs/haproxy/compare/v8.1.1...v8.2.0
-[v8.3.0]: https://github.com/sous-chefs/haproxy/compare/v8.2.0...v8.3.0
diff --git a/lc-gdn-chef/cookbooks/haproxy/LICENSE b/lc-gdn-chef/cookbooks/haproxy/LICENSE
deleted file mode 100644
index 11069edd79019f7dafbe3138841cf289209270dd..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/haproxy/LICENSE
+++ /dev/null
@@ -1,201 +0,0 @@
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
-TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
-2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
-3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
-4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
-5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
-6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
-7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
-8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
-9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
-END OF TERMS AND CONDITIONS
-
-APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
-Copyright [yyyy] [name of copyright owner]
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
diff --git a/lc-gdn-chef/cookbooks/haproxy/README.md b/lc-gdn-chef/cookbooks/haproxy/README.md
deleted file mode 100644
index 24017f8cb381f73758a3879c5373304b07748463..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/haproxy/README.md
+++ /dev/null
@@ -1,161 +0,0 @@
-# haproxy Cookbook
-
-[CI](https://github.com/sous-chefs/haproxy/actions?query=workflow%3Aci)
-[Cookbook Version](https://supermarket.chef.io/cookbooks/haproxy)
-[Backers](#backers)
-[Sponsors](#sponsors)
-[License: Apache-2.0](https://opensource.org/licenses/Apache-2.0)
-
-Installs and configures HAProxy.
-
-## Maintainers
-
-This cookbook is maintained by the Sous Chefs. The Sous Chefs are a community of Chef cookbook maintainers working together to maintain important cookbooks. If you’d like to know more please visit [sous-chefs.org](https://sous-chefs.org/) or come chat with us on the Chef Community Slack in [#sous-chefs](https://chefcommunity.slack.com/messages/C2V7B88SF).
-
-## Requirements
-
-* HAProxy `stable` or `LTS`
-* Chef 13.9+
-
-### Platforms
-
-This cookbook officially supports and is tested against the following platforms:
-
-* debian: 9 & 10
-* ubuntu: 20.04 & 21.04
-* centos: 7 & 8
-* centos-stream: 8
-* fedora: latest
-* amazonlinux: 2
-
-PRs are welcome to add support for additional platforms.
-
-### Examples
-
-Please check for working examples in [TEST](./test/fixtures/cookbooks/test/)
-
-## Common Resource Features
-
-HAProxy has many configurable options; this cookbook exposes the most popular of them as resource properties.
-
-If you wish to use an HAProxy directive that is not exposed as a property, the `extra_options` hash is available to take any number of additional values.
-
-For example, the ability to disable a listener is not provided out of the box but can be set through `extra_options`, as shown below. Further examples can be found in either `test/fixtures/recipes` or `spec/test/recipes`. If you have questions about how this works, or would like to add more examples to make it easier to understand, please come talk to us on the [Chef Community Slack](http://community-slack.chef.io/) in the #sous-chefs channel.
-
-```ruby
-haproxy_listen 'disabled' do
- bind '0.0.0.0:1337'
- mode 'http'
- extra_options('disabled': '')
-end
-```
-
-The `extra_options` hash maps `String => String` or `String => Array`. When an `Array` value is provided, the values are looped over, mapping the key to each value in the config.
-
-For example:
-
-```ruby
-haproxy_listen 'default' do
- extra_options(
- 'http-request' => [ 'set-header X-Public-User yes', 'del-header X-Bad-Header' ]
- )
-end
-```
-
-Becomes:
-
-```haproxy
-listen default
- ...
- http-request set-header X-Public-User yes
- http-request del-header X-Bad-Header
-```
-
-## Resources
-
-* [haproxy_acl](https://github.com/sous-chefs/haproxy/tree/master/documentation/haproxy_acl.md)
-* [haproxy_backend](https://github.com/sous-chefs/haproxy/tree/master/documentation/haproxy_backend.md)
-* [haproxy_cache](https://github.com/sous-chefs/haproxy/tree/master/documentation/haproxy_cache.md)
-* [haproxy_config_defaults](https://github.com/sous-chefs/haproxy/tree/master/documentation/haproxy_config_defaults.md)
-* [haproxy_config_global](https://github.com/sous-chefs/haproxy/tree/master/documentation/haproxy_config_global.md)
-* [haproxy_fastcgi](https://github.com/sous-chefs/haproxy/tree/master/documentation/haproxy_fastcgi.md)
-* [haproxy_frontend](https://github.com/sous-chefs/haproxy/tree/master/documentation/haproxy_frontend.md)
-* [haproxy_install](https://github.com/sous-chefs/haproxy/tree/master/documentation/haproxy_install.md)
-* [haproxy_listen](https://github.com/sous-chefs/haproxy/tree/master/documentation/haproxy_listen.md)
-* [haproxy_mailer](https://github.com/sous-chefs/haproxy/tree/master/documentation/haproxy_mailer.md)
-* [haproxy_peer](https://github.com/sous-chefs/haproxy/tree/master/documentation/haproxy_peer.md)
-* [haproxy_resolver](https://github.com/sous-chefs/haproxy/tree/master/documentation/haproxy_resolver.md)
-* [haproxy_service](https://github.com/sous-chefs/haproxy/tree/master/documentation/haproxy_service.md)
-* [haproxy_use_backend](https://github.com/sous-chefs/haproxy/tree/master/documentation/haproxy_use_backend.md)
-* [haproxy_userlist](https://github.com/sous-chefs/haproxy/tree/master/documentation/haproxy_userlist.md)
-
-## Configuration Validation
-
-The `haproxy.cfg` file has a few specific rule orderings that will generate validation errors if not respected. If you use any combination of the rules below, avoid these errors by loading the rules via `extra_options`, specifying the top-down order shown in the config excerpts below.
-
-### frontend & listen
-
-```haproxy
- tcp-request connection
- tcp-request session
- tcp-request content
- monitor fail
- block (deprecated)
- http-request
- reqxxx (any req excluding reqadd, e.g. reqdeny, reqallow)
- reqadd
- redirect
- use_backend
-```
-
-```ruby
- extra_options(
- 'tcp-request' => 'connection set-src src,ipmask(24)',
- 'reqdeny' => '^Host:\ .*\.local',
- 'reqallow' => '^Host:\ www\.',
- 'use_backend' => 'dynamic'
- )
-```
-
-### backend
-
-```haproxy
- http-request
- reqxxx (any req excluding reqadd, e.g. reqdeny, reqallow)
- reqadd
- redirect
-```
-
-```ruby
- extra_options(
- 'http-request' => 'set-path /%[hdr(host)]%[path]',
- 'reqdeny' => '^Host:\ .*\.local',
- 'reqallow' => '^Host:\ www\.',
- 'redirect' => 'dynamic'
- )
-```
-
-## Contributors
-
-This project exists thanks to all the people who [contribute](https://opencollective.com/sous-chefs/contributors.svg?width=890&button=false).
-
-### Backers
-
-Thank you to all our backers!
-
-
-
-### Sponsors
-
-Support this project by becoming a sponsor. Your logo will show up here with a link to your website.
-
diff --git a/lc-gdn-chef/cookbooks/haproxy/chefignore b/lc-gdn-chef/cookbooks/haproxy/chefignore
deleted file mode 100644
index a27b0b258ce499b1c554591535a85bcdb9292236..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/haproxy/chefignore
+++ /dev/null
@@ -1,115 +0,0 @@
-# Put files/directories that should be ignored in this file when uploading
-# to a Chef Infra Server or Supermarket.
-# Lines that start with '# ' are comments.
-
-# OS generated files #
-######################
-.DS_Store
-ehthumbs.db
-Icon?
-nohup.out
-Thumbs.db
-.envrc
-
-# EDITORS #
-###########
-.#*
-.project
-.settings
-*_flymake
-*_flymake.*
-*.bak
-*.sw[a-z]
-*.tmproj
-*~
-\#*
-REVISION
-TAGS*
-tmtags
-.vscode
-.editorconfig
-
-## COMPILED ##
-##############
-*.class
-*.com
-*.dll
-*.exe
-*.o
-*.pyc
-*.so
-*/rdoc/
-a.out
-mkmf.log
-
-# Testing #
-###########
-.circleci/*
-.codeclimate.yml
-.delivery/*
-.foodcritic
-.kitchen*
-.mdlrc
-.overcommit.yml
-.rspec
-.rubocop.yml
-.travis.yml
-.watchr
-.yamllint
-azure-pipelines.yml
-Dangerfile
-examples/*
-features/*
-Guardfile
-kitchen*.yml
-mlc_config.json
-Procfile
-Rakefile
-spec/*
-test/*
-
-# SCM #
-#######
-.git
-.gitattributes
-.gitconfig
-.github/*
-.gitignore
-.gitkeep
-.gitmodules
-.svn
-*/.bzr/*
-*/.git
-*/.hg/*
-*/.svn/*
-
-# Berkshelf #
-#############
-Berksfile
-Berksfile.lock
-cookbooks/*
-tmp
-
-# Bundler #
-###########
-vendor/*
-Gemfile
-Gemfile.lock
-
-# Policyfile #
-##############
-Policyfile.rb
-Policyfile.lock.json
-
-# Documentation #
-#############
-CODE_OF_CONDUCT*
-CONTRIBUTING*
-documentation/*
-TESTING*
-UPGRADING*
-
-# Vagrant #
-###########
-.vagrant
-Vagrantfile
diff --git a/lc-gdn-chef/cookbooks/haproxy/files/haproxy-default b/lc-gdn-chef/cookbooks/haproxy/files/haproxy-default
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/lc-gdn-chef/cookbooks/haproxy/libraries/helpers.rb b/lc-gdn-chef/cookbooks/haproxy/libraries/helpers.rb
deleted file mode 100644
index a85f12464de622e6d702950c4c732130df68b056..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/haproxy/libraries/helpers.rb
+++ /dev/null
@@ -1,85 +0,0 @@
-module Haproxy
- module Cookbook
- module Helpers
- def haproxy_version
- v = Mixlib::ShellOut.new("haproxy -v | grep version | awk '{ print $3 }'")
- v.run_command.stdout.to_f
- end
-
- def source_package_list
- case node['platform_family']
- when 'debian'
- %w(libpcre3-dev libssl-dev zlib1g-dev libsystemd-dev)
- when 'rhel', 'amazon', 'fedora'
- %w(pcre-devel openssl-devel zlib-devel systemd-devel)
- when 'suse'
- %w(pcre-devel libopenssl-devel zlib-devel systemd-devel)
- end
- end
-
- def ius_package
- {
- name: 'ius-release.rpm',
- url: 'https://repo.ius.io/ius-release-el7.rpm',
- }
- end
-
- def ius_platform_valid?
- platform_family?('rhel') && (platform_version.to_i == 6 || platform_version.to_i == 7)
- end
-
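- # Pick the make TARGET for a source build based on the node's kernel release:
- # modern kernels get linux-glibc (2.x sources) or linux2628 (1.x sources),
- # very old 2.6 kernels fall back to linux26, anything else to generic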
- def target_os(source_version)
- major_revision = node['kernel']['release'].split('.')[0..1].join('.').to_f
- minor_revision = node['kernel']['release'].split('.')[2].split('-').first.to_i
-
- if major_revision > 2.6
- source_version.chars.first == '1' ? 'linux2628' : 'linux-glibc'
- elsif major_revision == 2.6
- if minor_revision >= 28
- source_version.chars.first == '1' ? 'linux2628' : 'linux-glibc'
- else
- 'linux26'
- end
- else
- 'generic'
- end
- end
-
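- # haproxy releases prior to 1.8 ship a separate haproxy-systemd-wrapper binary;
- # 1.8 and later are started directly in master-worker mode via -Ws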
- def systemd_command(bin_prefix)
- if haproxy_version < 1.8
- ::File.join(bin_prefix, 'sbin', 'haproxy-systemd-wrapper')
- else
- ::File.join(bin_prefix, 'sbin', 'haproxy') + ' -Ws'
- end
- end
-
- def default_systemd_unit_content
- {
- 'Unit' => {
- 'Description' => 'HAProxy Load Balancer',
- 'Documentation' => 'file:/usr/share/doc/haproxy/configuration.txt.gz',
- 'After' => %w(network.target syslog.service),
- },
- 'Service' => {
- 'EnvironmentFile' => '-/etc/default/haproxy',
- 'Environment' => "CONFIG=#{config_file} PIDFILE=/run/haproxy.pid",
- 'ExecStartPre' => "#{bin_prefix}/sbin/haproxy -f $CONFIG -c -q",
- 'ExecStart' => "#{systemd_command(bin_prefix)} -f $CONFIG -p $PIDFILE $OPTIONS",
- 'ExecReload' => [
- "#{bin_prefix}/sbin/haproxy -f $CONFIG -c -q",
- '/bin/kill -USR2 $MAINPID',
- ],
- 'KillSignal' => 'TERM',
- 'User' => 'root',
- 'WorkingDirectory' => '/',
- 'KillMode' => 'mixed',
- 'Restart' => 'always',
- },
- 'Install' => {
- 'WantedBy' => 'multi-user.target',
- },
- }
- end
- end
- end
-end
diff --git a/lc-gdn-chef/cookbooks/haproxy/libraries/resource.rb b/lc-gdn-chef/cookbooks/haproxy/libraries/resource.rb
deleted file mode 100644
index 9d5f7c908bc21a90ec99af6b261be2ff677db10c..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/haproxy/libraries/resource.rb
+++ /dev/null
@@ -1,52 +0,0 @@
-module Haproxy
- module Cookbook
- module ResourceHelpers
- def haproxy_config_resource_init
- haproxy_config_resource_create unless haproxy_config_resource_exist?
- end
-
- def haproxy_config_resource
- return unless haproxy_config_resource_exist?
-
- find_resource!(:template, new_resource.config_file)
- end
-
- private
-
- def haproxy_config_resource_exist?
- !find_resource!(:template, new_resource.config_file).nil?
- rescue Chef::Exceptions::ResourceNotFound
- false
- end
-
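- # Declares the config directory and template in the root run_context with a
- # delayed :create action, so every haproxy_* resource accumulates its variables
- # on the same shared template before it is rendered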
- def haproxy_config_resource_create
- with_run_context(:root) do
- declare_resource(:directory, ::File.dirname(new_resource.config_file)) do
- owner new_resource.user
- group new_resource.group
- mode new_resource.config_dir_mode
-
- recursive true
-
- action :create
- end
-
- declare_resource(:template, new_resource.config_file) do
- cookbook new_resource.cookbook
- source new_resource.template
-
- owner new_resource.user
- group new_resource.group
- mode new_resource.config_file_mode
- sensitive new_resource.sensitive
-
- helpers(Haproxy::Cookbook::TemplateHelpers)
-
- action :nothing
- delayed_action :create
- end
- end
- end
- end
- end
-end
diff --git a/lc-gdn-chef/cookbooks/haproxy/libraries/template.rb b/lc-gdn-chef/cookbooks/haproxy/libraries/template.rb
deleted file mode 100644
index 7b6d151e7db6d015c4e428eeb66d9f6c845657cd..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/haproxy/libraries/template.rb
+++ /dev/null
@@ -1,9 +0,0 @@
-module Haproxy
- module Cookbook
- module TemplateHelpers
- def nil_or_empty?(v)
- v.nil? || (v.respond_to?(:empty?) && v.empty?)
- end
- end
- end
-end
diff --git a/lc-gdn-chef/cookbooks/haproxy/metadata.json b/lc-gdn-chef/cookbooks/haproxy/metadata.json
deleted file mode 100644
index bb64f0e9691d5a46d3766e96a2e9e414012b03e1..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/haproxy/metadata.json
+++ /dev/null
@@ -1,40 +0,0 @@
-{
- "name": "haproxy",
- "description": "Installs and configures haproxy",
- "long_description": "",
- "maintainer": "Sous Chefs",
- "maintainer_email": "help@sous-chefs.org",
- "license": "Apache-2.0",
- "platforms": {
- "debian": ">= 0.0.0",
- "ubuntu": ">= 0.0.0",
- "centos": ">= 0.0.0",
- "amazon": ">= 0.0.0",
- "opensuseleap": ">= 0.0.0"
- },
- "dependencies": {
- "yum-epel": ">= 0.0.0"
- },
- "providing": {
-
- },
- "recipes": {
-
- },
- "version": "12.2.3",
- "source_url": "https://github.com/sous-chefs/haproxy",
- "issues_url": "https://github.com/sous-chefs/haproxy/issues",
- "privacy": false,
- "chef_versions": [
- [
- ">= 16"
- ]
- ],
- "ohai_versions": [
-
- ],
- "gems": [
-
- ],
- "eager_load_libraries": true
-}
diff --git a/lc-gdn-chef/cookbooks/haproxy/metadata.rb b/lc-gdn-chef/cookbooks/haproxy/metadata.rb
deleted file mode 100644
index c63eebcf568cd0a7f504a8fe0637daffe526cecf..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/haproxy/metadata.rb
+++ /dev/null
@@ -1,16 +0,0 @@
-name 'haproxy'
-maintainer 'Sous Chefs'
-maintainer_email 'help@sous-chefs.org'
-license 'Apache-2.0'
-description 'Installs and configures haproxy'
-version '12.2.3'
-source_url 'https://github.com/sous-chefs/haproxy'
-issues_url 'https://github.com/sous-chefs/haproxy/issues'
-chef_version '>= 16'
-
-supports 'debian'
-supports 'ubuntu'
-supports 'centos'
-supports 'amazon'
-supports 'opensuseleap'
-depends 'yum-epel'
diff --git a/lc-gdn-chef/cookbooks/haproxy/resources/acl.rb b/lc-gdn-chef/cookbooks/haproxy/resources/acl.rb
deleted file mode 100644
index 1a88bcde35b6023daa33def8fe64169f8126ac98..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/haproxy/resources/acl.rb
+++ /dev/null
@@ -1,40 +0,0 @@
-use 'partial/_config_file'
-
-property :acl, [String, Array],
- name_property: true,
- coerce: proc { |p| Array(p) },
- description: 'The access control list items'
-
-property :section, String,
- required: true,
- equal_to: %w(frontend listen backend),
- description: 'The section where the acl(s) should be applied'
-
-property :section_name, String,
- required: true,
- description: 'The name of the specific frontend, listen or backend section'
-
-unified_mode true
-
-action_class do
- include Haproxy::Cookbook::ResourceHelpers
-end
-
-action :create do
- haproxy_config_resource_init
-
- haproxy_config_resource.variables[new_resource.section] ||= {}
-
- haproxy_config_resource.variables[new_resource.section][new_resource.section_name] ||= {}
-
- haproxy_config_resource.variables[new_resource.section][new_resource.section_name]['acl'] ||= []
- haproxy_config_resource.variables[new_resource.section][new_resource.section_name]['acl'].push(new_resource.acl)
-end
-
-action :delete do
- haproxy_config_resource_init
-
- haproxy_config_resource.variables[new_resource.section] ||= {}
- haproxy_config_resource.variables[new_resource.section][new_resource.section_name] ||= {}
- haproxy_config_resource.variables[new_resource.section][new_resource.section_name]['acl'] ||= []
- haproxy_config_resource.variables[new_resource.section][new_resource.section_name]['acl'].delete(new_resource.acl)
-end
diff --git a/lc-gdn-chef/cookbooks/haproxy/resources/backend.rb b/lc-gdn-chef/cookbooks/haproxy/resources/backend.rb
deleted file mode 100644
index 496659f8e3febff40ed6c17cd1571ac49c45e9f8..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/haproxy/resources/backend.rb
+++ /dev/null
@@ -1,82 +0,0 @@
-use 'partial/_config_file'
-use 'partial/_extra_options'
-
-property :mode, String,
- equal_to: %w(http tcp health),
- description: 'Set the running mode or protocol of the instance'
-
-property :server, [String, Array],
- coerce: proc { |p| Array(p) },
- description: 'Servers the backend routes to'
-
-property :tcp_request, [String, Array],
- coerce: proc { |p| Array(p) },
- description: 'HAProxy tcp-request settings'
-
-property :reqrep, [String, Array],
- coerce: proc { |p| Array(p) },
- description: 'Replace a regular expression with a string in an HTTP request line'
-
-property :reqirep, [String, Array],
- coerce: proc { |p| Array(p) },
- description: 'reqrep ignoring case'
-
-property :acl, Array,
- description: 'Access control list items'
-
-property :option, Array,
- description: 'Array of HAProxy option directives'
-
-property :hash_type, String,
- equal_to: %w(consistent map-based),
- description: 'Specify a method to use for mapping hashes to servers'
-
-unified_mode true
-
-action_class do
- include Haproxy::Cookbook::ResourceHelpers
-end
-
-action :create do
- haproxy_config_resource_init
-
- haproxy_config_resource.variables['backend'] ||= {}
-
- haproxy_config_resource.variables['backend'][new_resource.name] ||= {}
- haproxy_config_resource.variables['backend'][new_resource.name]['mode'] = new_resource.mode if property_is_set?(:mode)
-
- if property_is_set?(:server)
- haproxy_config_resource.variables['backend'][new_resource.name]['server'] ||= []
- haproxy_config_resource.variables['backend'][new_resource.name]['server'].push(new_resource.server)
- end
-
- if property_is_set?(:tcp_request)
- haproxy_config_resource.variables['backend'][new_resource.name]['tcp_request'] ||= []
- haproxy_config_resource.variables['backend'][new_resource.name]['tcp_request'].push(new_resource.tcp_request)
- end
-
- haproxy_config_resource.variables['backend'][new_resource.name]['reqrep'] = new_resource.reqrep.flatten if property_is_set?(:reqrep)
- haproxy_config_resource.variables['backend'][new_resource.name]['reqirep'] = new_resource.reqirep.flatten if property_is_set?(:reqirep)
-
- if property_is_set?(:acl)
- haproxy_config_resource.variables['backend'][new_resource.name]['acl'] ||= []
- haproxy_config_resource.variables['backend'][new_resource.name]['acl'].push(new_resource.acl)
- end
-
- if property_is_set?(:option)
- haproxy_config_resource.variables['backend'][new_resource.name]['option'] ||= []
- haproxy_config_resource.variables['backend'][new_resource.name]['option'].push(new_resource.option)
- end
-
- haproxy_config_resource.variables['backend'][new_resource.name]['hash_type'] = new_resource.hash_type if property_is_set?(:hash_type)
- haproxy_config_resource.variables['backend'][new_resource.name]['extra_options'] = new_resource.extra_options if property_is_set?(:extra_options)
-end
-
-action :delete do
- haproxy_config_resource_init
-
- haproxy_config_resource.variables['backend'] ||= {}
-
- haproxy_config_resource.variables['backend'][new_resource.name] ||= {}
- haproxy_config_resource.variables['backend'].delete(new_resource.name) if haproxy_config_resource.variables['backend'].key?(new_resource.name)
-end
diff --git a/lc-gdn-chef/cookbooks/haproxy/resources/cache.rb b/lc-gdn-chef/cookbooks/haproxy/resources/cache.rb
deleted file mode 100644
index 8e7b7d2336a1b37ee2e9bb545bd25e20c706bdfd..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/haproxy/resources/cache.rb
+++ /dev/null
@@ -1,40 +0,0 @@
-use 'partial/_config_file'
-
-property :cache_name, String,
- name_property: true,
- description: 'Name of the cache'
-
-property :total_max_size, Integer,
- description: 'Define the size in RAM of the cache in megabytes'
-
-property :max_object_size, Integer,
- description: 'Define the maximum size of the objects to be cached'
-
-property :max_age, Integer,
- description: 'Define the maximum expiration duration in seconds'
-
-unified_mode true
-
-action_class do
- include Haproxy::Cookbook::ResourceHelpers
-end
-
-action :create do
- haproxy_config_resource_init
-
- haproxy_config_resource.variables['cache'] ||= {}
-
- haproxy_config_resource.variables['cache'][new_resource.cache_name] ||= {}
- haproxy_config_resource.variables['cache'][new_resource.cache_name]['total_max_size'] = new_resource.total_max_size if property_is_set?(:total_max_size)
- haproxy_config_resource.variables['cache'][new_resource.cache_name]['max_object_size'] = new_resource.max_object_size if property_is_set?(:max_object_size)
- haproxy_config_resource.variables['cache'][new_resource.cache_name]['max_age'] = new_resource.max_age if property_is_set?(:max_age)
-end
-
-action :delete do
- haproxy_config_resource_init
-
- haproxy_config_resource.variables['cache'] ||= {}
-
- haproxy_config_resource.variables['cache'][new_resource.cache_name] ||= {}
- haproxy_config_resource.variables['cache'].delete(new_resource.cache_name) if haproxy_config_resource.variables['cache'].key?(new_resource.cache_name)
-end
diff --git a/lc-gdn-chef/cookbooks/haproxy/resources/config_defaults.rb b/lc-gdn-chef/cookbooks/haproxy/resources/config_defaults.rb
deleted file mode 100644
index 501880b95edf959c2619c115769d15e85a2024bb..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/haproxy/resources/config_defaults.rb
+++ /dev/null
@@ -1,64 +0,0 @@
-use 'partial/_config_file'
-use 'partial/_extra_options'
-
-property :timeout, Hash,
- default: { client: '10s', server: '10s', connect: '10s' },
- description: 'Default HAProxy timeout values'
-
-property :log, String,
- default: 'global',
- description: 'Enable per-instance logging of events and traffic'
-
-property :mode, String,
- default: 'http',
- equal_to: %w(http tcp health),
- description: 'Set the running mode or protocol of the instance'
-
-property :balance, String,
- default: 'roundrobin',
- equal_to: %w(roundrobin static-rr leastconn first source uri url_param header rdp-cookie),
- description: 'Define the load balancing algorithm to be used in a backend'
-
-property :option, Array,
- default: %w(httplog dontlognull redispatch tcplog),
- description: 'Array of HAProxy option directives'
-
-property :stats, Hash,
- default: {},
- description: 'Enable HAProxy statistics'
-
-property :maxconn, Integer,
- description: 'Sets the maximum per-process number of concurrent connections'
-
-property :haproxy_retries, Integer,
- description: 'Set the number of retries to perform on a server after a connection failure'
-
-property :hash_type, [String, nil],
- equal_to: ['consistent', 'map-based', nil],
- description: 'Specify a method to use for mapping hashes to servers'
-
-unified_mode true
-
-action_class do
- include Haproxy::Cookbook::ResourceHelpers
-end
-
-action :create do
- haproxy_config_resource_init
-
- haproxy_config_resource.variables['defaults'] ||= {}
-
- haproxy_config_resource.variables['defaults']['timeout'] = new_resource.timeout
- haproxy_config_resource.variables['defaults']['log'] = new_resource.log
- haproxy_config_resource.variables['defaults']['mode'] = new_resource.mode
- haproxy_config_resource.variables['defaults']['balance'] = new_resource.balance
-
- haproxy_config_resource.variables['defaults']['option'] ||= []
- haproxy_config_resource.variables['defaults']['option'].push(new_resource.option).flatten!
-
- haproxy_config_resource.variables['defaults']['stats'] = new_resource.stats
- haproxy_config_resource.variables['defaults']['maxconn'] = new_resource.maxconn.to_s if property_is_set?(:maxconn)
- haproxy_config_resource.variables['defaults']['retries'] = new_resource.haproxy_retries.to_s if property_is_set?(:haproxy_retries)
- haproxy_config_resource.variables['defaults']['hash_type'] = new_resource.hash_type if property_is_set?(:hash_type)
- haproxy_config_resource.variables['defaults']['extra_options'] = new_resource.extra_options if property_is_set?(:extra_options)
-end
diff --git a/lc-gdn-chef/cookbooks/haproxy/resources/config_global.rb b/lc-gdn-chef/cookbooks/haproxy/resources/config_global.rb
deleted file mode 100644
index 827f5c970ffe37f194741d23b7c7bb512196c3e1..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/haproxy/resources/config_global.rb
+++ /dev/null
@@ -1,70 +0,0 @@
-use 'partial/_config_file'
-use 'partial/_extra_options'
-
-property :pidfile, String,
- default: '/var/run/haproxy.pid',
- description: 'Writes the PIDs of all daemons into the specified file'
-
-property :log, [String, Array],
- default: '/dev/log syslog info',
- description: 'Adds a global syslog server'
-
-property :daemon, [true, false],
- default: true,
- description: 'Makes the process fork into background'
-
-property :debug_option, String,
- default: 'quiet',
- equal_to: %w(quiet debug),
- description: 'Sets the debugging mode'
-
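- # Default is lazy so the stats socket line picks up the user/group properties
- # (provided by partial/_config_file) even when they are overridden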
-property :stats, Hash,
- default: lazy {
- {
- socket: "/var/run/haproxy.sock user #{user} group #{group}",
- timeout: '2m',
- }
- },
- description: 'Enable stats with various options'
-
-property :maxconn, [Integer, String],
- default: 4096,
- description: 'Sets the maximum per-process number of concurrent connections'
-
-property :chroot, String,
- description: 'Changes the current directory to the given path and performs a chroot() there before dropping privileges'
-
-property :log_tag, String,
- default: 'haproxy',
- description: 'Specifies the log tag to use for all outgoing logs'
-
-property :tuning, Hash,
- description: 'A hash of tune.* options'
-
-unified_mode true
-
-action_class do
- include Haproxy::Cookbook::ResourceHelpers
-end
-
-action :create do
- haproxy_config_resource_init
-
- haproxy_config_resource.variables['global'] ||= {}
-
- haproxy_config_resource.variables['global']['user'] = new_resource.user
- haproxy_config_resource.variables['global']['group'] = new_resource.group
- haproxy_config_resource.variables['global']['pidfile'] = new_resource.pidfile
-
- haproxy_config_resource.variables['global']['log'] ||= []
- haproxy_config_resource.variables['global']['log'].push(new_resource.log)
-
- haproxy_config_resource.variables['global']['log_tag'] = new_resource.log_tag
- haproxy_config_resource.variables['global']['chroot'] = new_resource.chroot if property_is_set?(:chroot)
- haproxy_config_resource.variables['global']['daemon'] = new_resource.daemon.to_s
- haproxy_config_resource.variables['global']['debug_option'] = new_resource.debug_option
- haproxy_config_resource.variables['global']['maxconn'] = new_resource.maxconn
- haproxy_config_resource.variables['global']['stats'] = new_resource.stats
- haproxy_config_resource.variables['global']['tuning'] = new_resource.tuning if property_is_set?(:tuning)
- haproxy_config_resource.variables['global']['extra_options'] = new_resource.extra_options if property_is_set?(:extra_options)
-end
diff --git a/lc-gdn-chef/cookbooks/haproxy/resources/fastcgi.rb b/lc-gdn-chef/cookbooks/haproxy/resources/fastcgi.rb
deleted file mode 100644
index 70729dbb85ab984a343eff1531a97bdf6218b6bc..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/haproxy/resources/fastcgi.rb
+++ /dev/null
@@ -1,53 +0,0 @@
-use 'partial/_config_file'
-
-property :fastcgi, String,
- name_property: true,
- description: 'Name property - sets the fcgi-app name'
-
-property :docroot, String,
- description: 'Define the document root on the remote host'
-
-property :index, String,
- description: 'Define the script name that will be appended after an URI that ends with a slash'
-
-property :log_stderr, String,
- description: 'Enable logging of STDERR messages reported by the FastCGI application'
-
-property :option, Array,
- description: 'Array of HAProxy option directives'
-
-property :extra_options, Hash,
- description: 'Used for setting any HAProxy directives'
-
-unified_mode true
-
-action_class do
- include Haproxy::Cookbook::ResourceHelpers
-end
-
-action :create do
- haproxy_config_resource_init
-
- haproxy_config_resource.variables['fastcgi'] ||= {}
-
- haproxy_config_resource.variables['fastcgi'][new_resource.name] ||= {}
- haproxy_config_resource.variables['fastcgi'][new_resource.name]['docroot'] = new_resource.docroot if property_is_set?(:docroot)
- haproxy_config_resource.variables['fastcgi'][new_resource.name]['index'] = new_resource.index if property_is_set?(:index)
- haproxy_config_resource.variables['fastcgi'][new_resource.name]['log_stderr'] = new_resource.log_stderr if property_is_set?(:log_stderr)
-
- if property_is_set?(:option)
- haproxy_config_resource.variables['fastcgi'][new_resource.name]['option'] ||= []
- haproxy_config_resource.variables['fastcgi'][new_resource.name]['option'].push(new_resource.option)
- end
-
- haproxy_config_resource.variables['fastcgi'][new_resource.name]['extra_options'] = new_resource.extra_options if property_is_set?(:extra_options)
-end
-
-action :delete do
- haproxy_config_resource_init
-
- haproxy_config_resource.variables['fastcgi'] ||= {}
-
- haproxy_config_resource.variables['fastcgi'][new_resource.name] ||= {}
- haproxy_config_resource.variables['fastcgi'].delete(new_resource.name) if haproxy_config_resource.variables['fastcgi'].key?(new_resource.name)
-end
diff --git a/lc-gdn-chef/cookbooks/haproxy/resources/frontend.rb b/lc-gdn-chef/cookbooks/haproxy/resources/frontend.rb
deleted file mode 100644
index 13746dedecc2055cc62be1c8f22e5e5fac1925c7..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/haproxy/resources/frontend.rb
+++ /dev/null
@@ -1,89 +0,0 @@
-use 'partial/_config_file'
-use 'partial/_extra_options'
-
-property :bind, [String, Hash],
- default: '0.0.0.0:80',
- description: 'String - sets as given. Hash - joins with a space'
-
-property :mode, String,
- equal_to: %w(http tcp health),
- description: 'Set the running mode or protocol of the instance'
-
-property :maxconn, [Integer, String],
- coerce: proc { |p| p.to_s },
- description: 'Sets the maximum per-process number of concurrent connections'
-
-property :reqrep, [Array, String],
- description: 'Replace a regular expression with a string in an HTTP request line'
-
-property :reqirep, [Array, String],
- description: 'reqrep ignoring case'
-
-property :default_backend, String,
- description: 'Specify the backend to use when no "use_backend" rule has been matched'
-
-property :use_backend, Array,
- description: 'Switch to a specific backend if/unless an ACL-based condition is matched'
-
-property :acl, Array,
- description: 'Access control list items'
-
-property :option, Array,
- description: 'Array of HAProxy option directives'
-
-property :stats, Hash,
- description: 'Enable stats with various options'
-
-unified_mode true
-
-action_class do
- include Haproxy::Cookbook::ResourceHelpers
-end
-
-action :create do
- haproxy_config_resource_init
-
- haproxy_config_resource.variables['frontend'] ||= {}
-
- haproxy_config_resource.variables['frontend'][new_resource.name] ||= {}
- haproxy_config_resource.variables['frontend'][new_resource.name]['bind'] = []
-
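- # A Hash bind is expanded into every address/port combination (one bind entry
- # per pair); a plain String is pushed as-is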
- if new_resource.bind.is_a?(Hash)
- new_resource.bind.map do |addresses, ports|
- Array(addresses).product(Array(ports)).each do |combo|
- haproxy_config_resource.variables['frontend'][new_resource.name]['bind'].push(combo.join(' ').strip)
- end
- end
- else
- haproxy_config_resource.variables['frontend'][new_resource.name]['bind'].push(new_resource.bind)
- end
-
- haproxy_config_resource.variables['frontend'][new_resource.name]['default_backend'] = new_resource.default_backend if property_is_set?(:default_backend)
- haproxy_config_resource.variables['frontend'][new_resource.name]['mode'] = new_resource.mode if property_is_set?(:mode)
- haproxy_config_resource.variables['frontend'][new_resource.name]['stats'] = new_resource.stats if property_is_set?(:stats)
- haproxy_config_resource.variables['frontend'][new_resource.name]['maxconn'] = new_resource.maxconn if property_is_set?(:maxconn)
- haproxy_config_resource.variables['frontend'][new_resource.name]['reqrep'] = [new_resource.reqrep].flatten if property_is_set?(:reqrep)
- haproxy_config_resource.variables['frontend'][new_resource.name]['reqirep'] = [new_resource.reqirep].flatten if property_is_set?(:reqirep)
- haproxy_config_resource.variables['frontend'][new_resource.name]['use_backend'] = new_resource.use_backend if property_is_set?(:use_backend)
-
- if property_is_set?(:acl)
- haproxy_config_resource.variables['frontend'][new_resource.name]['acl'] ||= []
- haproxy_config_resource.variables['frontend'][new_resource.name]['acl'].push(new_resource.acl)
- end
-
- if property_is_set?(:option)
- haproxy_config_resource.variables['frontend'][new_resource.name]['option'] ||= []
- haproxy_config_resource.variables['frontend'][new_resource.name]['option'].push(new_resource.option)
- end
-
- haproxy_config_resource.variables['frontend'][new_resource.name]['extra_options'] = new_resource.extra_options if property_is_set?(:extra_options)
-end
-
-action :delete do
- haproxy_config_resource_init
-
- haproxy_config_resource.variables['frontend'] ||= {}
-
- haproxy_config_resource.variables['frontend'][new_resource.name] ||= {}
- haproxy_config_resource.variables['frontend'].delete(new_resource.name) if haproxy_config_resource.variables['frontend'].key?(new_resource.name)
-end
diff --git a/lc-gdn-chef/cookbooks/haproxy/resources/install.rb b/lc-gdn-chef/cookbooks/haproxy/resources/install.rb
deleted file mode 100644
index bf8d226cafcb1e06b7f40b2a4af0e5d525e2cfa2..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/haproxy/resources/install.rb
+++ /dev/null
@@ -1,182 +0,0 @@
-include Haproxy::Cookbook::Helpers
-
-use 'partial/_config_file'
-
-property :install_type, String,
- name_property: true,
- equal_to: %w(package source),
- description: 'Set the installation type'
-
-property :bin_prefix, String,
- default: '/usr',
- description: 'Set the source compile prefix'
-
-property :sensitive, [true, false],
- default: true,
- description: 'Ensure that sensitive resource data is not logged by the chef-client'
-
-# Package
-property :package_name, String,
- default: 'haproxy'
-
-property :package_version, [String, nil]
-
-property :enable_ius_repo, [true, false],
- default: false,
- description: 'Enables the IUS package repo for CentOS to install haproxy versions newer than 1.5'
-
-property :enable_epel_repo, [true, false],
- default: true,
- description: 'Enables the EPEL repo for RHEL-based operating systems'
-
-# Source
-property :source_version, String,
- default: '2.2.4'
-
-property :source_url, String,
- default: lazy { "https://www.haproxy.org/download/#{source_version.to_f}/src/haproxy-#{source_version}.tar.gz" }
-
-property :source_checksum, String,
- default: '87a4d9d4ff8dc3094cb61bbed4a8eed2c40b5ac47b9604daebaf036d7b541be2'
-
-property :source_target_cpu, String,
- default: lazy { node['kernel']['machine'] }
-
-property :source_target_arch, String
-
-property :source_target_os, String,
- default: lazy { target_os(source_version) }
-
-property :use_libcrypt, [true, false],
- default: true
-
-property :use_pcre, [true, false],
- default: true
-
-property :use_promex, [true, false],
- default: false
-
-property :use_openssl, [true, false],
- default: true
-
-property :use_zlib, [true, false],
- default: true
-
-property :use_linux_tproxy, [true, false],
- default: true
-
-property :use_linux_splice, [true, false],
- default: true
-
-property :use_lua, [true, false],
- default: false
-
-property :lua_lib, String
-
-property :lua_inc, String
-
-property :ssl_lib, String
-
-property :ssl_inc, String
-
-property :use_systemd, [true, false],
- default: lazy { source_version.to_f >= 1.8 },
- description: 'Evaluates whether to use systemd based on the haproxy source version'
-
-unified_mode true
-
-action_class do
- include Haproxy::Cookbook::ResourceHelpers
-end
-
-action_class do
- include Haproxy::Cookbook::Helpers
- include Haproxy::Cookbook::ResourceHelpers
-
- def compile_make_boolean(bool)
- bool ? '1' : '0'
- end
-end
-
-action :install do
- case new_resource.install_type
- when 'package'
- case node['platform_family']
- when 'amazon'
- include_recipe 'yum-epel' if new_resource.enable_epel_repo
- when 'rhel'
- include_recipe 'yum-epel' if new_resource.enable_epel_repo
-
- if new_resource.enable_ius_repo
- if ius_platform_valid?
- puts ius_package[:url]
-
- remote_file ::File.join(Chef::Config[:file_cache_path], ius_package[:name]) do
- source ius_package[:url]
- only_if { new_resource.enable_ius_repo }
- end
-
- package ius_package[:name] do
- source ::File.join(Chef::Config[:file_cache_path], ius_package[:name])
- only_if { new_resource.enable_ius_repo }
- end
- else
- log 'This platform is not supported by IUS, ignoring enable_ius_repo property' do
- level :warn
- end
- end
- end
- end
-
- package new_resource.package_name do
- version new_resource.package_version if new_resource.package_version
- end
- when 'source'
- build_essential 'compilation tools'
- package source_package_list
-
- remote_file 'haproxy source file' do
- path ::File.join(Chef::Config[:file_cache_path], "haproxy-#{new_resource.source_version}.tar.gz")
- source new_resource.source_url
- checksum new_resource.source_checksum if new_resource.source_checksum
- action :create
- end
-
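- # Assemble the make invocation, toggling optional features via haproxy's USE_*
- # build flags; CPU/ARCH and the Lua/SSL paths are only appended when explicitly set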
- make_cmd = "make TARGET=#{new_resource.source_target_os}"
- make_cmd << " CPU=#{new_resource.source_target_cpu}" if property_is_set?(:source_target_cpu)
- make_cmd << " ARCH=#{new_resource.source_target_arch}" if property_is_set?(:source_target_arch)
- make_cmd << " USE_LIBCRYPT=#{compile_make_boolean(new_resource.use_libcrypt)}"
- make_cmd << " USE_PCRE=#{compile_make_boolean(new_resource.use_pcre)}"
- make_cmd << " USE_OPENSSL=#{compile_make_boolean(new_resource.use_openssl)}"
- make_cmd << " USE_ZLIB=#{compile_make_boolean(new_resource.use_zlib)}"
- make_cmd << " USE_LINUX_TPROXY=#{compile_make_boolean(new_resource.use_linux_tproxy)}"
- make_cmd << " USE_LINUX_SPLICE=#{compile_make_boolean(new_resource.use_linux_splice)}"
- make_cmd << " USE_SYSTEMD=#{compile_make_boolean(new_resource.use_systemd)}"
- make_cmd << " USE_LUA=#{compile_make_boolean(new_resource.use_lua)}" if new_resource.use_lua
- make_cmd << " USE_PROMEX=#{compile_make_boolean(new_resource.use_promex)}" if new_resource.use_promex
- make_cmd << " LUA_LIB=#{new_resource.lua_lib}" if property_is_set?(:lua_lib)
- make_cmd << " LUA_INC=#{new_resource.lua_inc}" if property_is_set?(:lua_inc)
- make_cmd << " SSL_LIB=#{new_resource.ssl_lib}" if property_is_set?(:ssl_lib)
- make_cmd << " SSL_INC=#{new_resource.ssl_inc}" if property_is_set?(:ssl_inc)
- extra_cmd = ' EXTRA=haproxy-systemd-wrapper' if new_resource.source_version.to_f < 1.8
-
- bash 'compile_haproxy' do
- cwd Chef::Config[:file_cache_path]
- code <<-EOH
- tar xzf haproxy-#{new_resource.source_version}.tar.gz
- cd haproxy-#{new_resource.source_version}
- #{make_cmd} && make install PREFIX=#{new_resource.bin_prefix} #{extra_cmd}
- EOH
- not_if "#{::File.join(new_resource.bin_prefix, 'sbin', 'haproxy')} -v | grep #{new_resource.source_version}"
- end
- end
-
- with_run_context :root do
- group new_resource.group
-
- user new_resource.user do
- home "/home/#{new_resource.user}"
- group new_resource.group
- end
- end
-end
diff --git a/lc-gdn-chef/cookbooks/haproxy/resources/listen.rb b/lc-gdn-chef/cookbooks/haproxy/resources/listen.rb
deleted file mode 100644
index f7e145fa9a127325f5749a279fb92cafe284279d..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/haproxy/resources/listen.rb
+++ /dev/null
@@ -1,109 +0,0 @@
-use 'partial/_config_file'
-use 'partial/_extra_options'
-
-property :bind, [String, Hash],
- default: '0.0.0.0:80',
- description: 'String - sets as given. Hash - joins with a space'
-
-property :mode, String,
- equal_to: %w(http tcp health),
- description: 'Set the running mode or protocol of the instance'
-
-property :maxconn, Integer,
- description: 'Sets the maximum per-process number of concurrent connections'
-
-property :stats, Hash,
- description: 'Enable stats with various options'
-
-property :http_request, [Array, String],
- coerce: proc { |p| Array(p).flatten },
- description: 'HAProxy http-request directives to apply in this listen section'
-
-property :http_response, String,
- description: 'HAProxy http-response directives to apply in this listen section'
-
-property :reqrep, [Array, String],
- coerce: proc { |p| Array(p).flatten },
- description: 'Replace a regular expression with a string in an HTTP request line'
-
-property :reqirep, [Array, String],
- coerce: proc { |p| Array(p).flatten },
- description: 'reqrep ignoring case'
-
-property :default_backend, String,
- description: 'Specify the backend to use when no "use_backend" rule has been matched'
-
-property :use_backend, Array,
- description: 'Switch to a specific backend if/unless an ACL-based condition is matched'
-
-property :acl, Array,
- description: 'Access control list items'
-
-property :server, Array,
- description: 'Servers the listen section routes to'
-
-property :hash_type, String,
- equal_to: %w(consistent map-based),
- description: 'Specify a method to use for mapping hashes to servers'
-
-unified_mode true
-
-action_class do
- include Haproxy::Cookbook::ResourceHelpers
-end
-
-action :create do
- haproxy_config_resource_init
-
- haproxy_config_resource.variables['listen'] ||= {}
-
- haproxy_config_resource.variables['listen'][new_resource.name] ||= {}
- haproxy_config_resource.variables['listen'][new_resource.name]['bind'] ||= []
-
- if new_resource.bind.is_a?(Hash)
- new_resource.bind.map do |addresses, ports|
- Array(addresses).product(Array(ports)).each do |combo|
- haproxy_config_resource.variables['listen'][new_resource.name]['bind'].push(combo.join(' ').strip)
- end
- end
- else
- haproxy_config_resource.variables['listen'][new_resource.name]['bind'].push(new_resource.bind)
- end
-
- haproxy_config_resource.variables['listen'][new_resource.name]['mode'] = new_resource.mode if property_is_set?(:mode)
- haproxy_config_resource.variables['listen'][new_resource.name]['maxconn'] = new_resource.maxconn.to_s if property_is_set?(:maxconn)
- haproxy_config_resource.variables['listen'][new_resource.name]['stats'] = new_resource.stats if property_is_set?(:stats)
- haproxy_config_resource.variables['listen'][new_resource.name]['http_request'] = new_resource.http_request if property_is_set?(:http_request)
- haproxy_config_resource.variables['listen'][new_resource.name]['http_response'] = new_resource.http_response if property_is_set?(:http_response)
- haproxy_config_resource.variables['listen'][new_resource.name]['reqrep'] = new_resource.reqrep if property_is_set?(:reqrep)
- haproxy_config_resource.variables['listen'][new_resource.name]['reqirep'] = new_resource.reqirep if property_is_set?(:reqirep)
-
- if property_is_set?(:use_backend)
- haproxy_config_resource.variables['listen'][new_resource.name]['use_backend'] ||= []
- haproxy_config_resource.variables['listen'][new_resource.name]['use_backend'].push(new_resource.use_backend)
- end
-
- if property_is_set?(:acl)
- haproxy_config_resource.variables['listen'][new_resource.name]['acl'] ||= []
- haproxy_config_resource.variables['listen'][new_resource.name]['acl'].push(new_resource.acl)
- end
-
- haproxy_config_resource.variables['listen'][new_resource.name]['default_backend'] = new_resource.default_backend if property_is_set?(:default_backend)
-
- if property_is_set?(:server)
- haproxy_config_resource.variables['listen'][new_resource.name]['server'] ||= []
- haproxy_config_resource.variables['listen'][new_resource.name]['server'].push(new_resource.server)
- end
-
- haproxy_config_resource.variables['listen'][new_resource.name]['hash_type'] = new_resource.hash_type if property_is_set?(:hash_type)
- haproxy_config_resource.variables['listen'][new_resource.name]['extra_options'] = new_resource.extra_options if property_is_set?(:extra_options)
-end
-
-action :delete do
- haproxy_config_resource_init
-
- haproxy_config_resource.variables['listen'] ||= {}
-
- haproxy_config_resource.variables['listen'][new_resource.name] ||= {}
- haproxy_config_resource.variables['listen'].delete(new_resource.name) if haproxy_config_resource.variables['listen'].key?(new_resource.name)
-end
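
For reference, a minimal recipe sketch for the listen resource removed above (values are illustrative; the `haproxy_listen` name follows Chef's cookbook_resource naming convention):

```ruby
# Renders a `listen stats` section with bind, mode, maxconn and stats lines.
haproxy_listen 'stats' do
  bind    '0.0.0.0:8404'      # String form is emitted verbatim as the bind line
  mode    'http'
  maxconn 256
  stats   uri: '/', refresh: '10s'
end
```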
diff --git a/lc-gdn-chef/cookbooks/haproxy/resources/mailer.rb b/lc-gdn-chef/cookbooks/haproxy/resources/mailer.rb
deleted file mode 100644
index 0d4b3c7b0aa55ab0ae79316ca863dd4fe2f4bd3c..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/haproxy/resources/mailer.rb
+++ /dev/null
@@ -1,33 +0,0 @@
-use 'partial/_config_file'
-
-property :mailer, [String, Array],
- coerce: proc { |p| Array(p).flatten },
- description: 'Defines a mailer inside a mailers section'
-
-property :timeout, String,
- description: 'Defines the time available for a mail/connection to be made and sent to the mail server'
-
-unified_mode true
-
-action_class do
- include Haproxy::Cookbook::ResourceHelpers
-end
-
-action :create do
- haproxy_config_resource_init
-
- haproxy_config_resource.variables['mailer'] ||= {}
-
- haproxy_config_resource.variables['mailer'][new_resource.name] ||= {}
- haproxy_config_resource.variables['mailer'][new_resource.name]['mailer'] = new_resource.mailer if property_is_set?(:mailer)
- haproxy_config_resource.variables['mailer'][new_resource.name]['timeout'] = new_resource.timeout if property_is_set?(:timeout)
-end
-
-action :delete do
- haproxy_config_resource_init
-
- haproxy_config_resource.variables['mailer'] ||= {}
-
- haproxy_config_resource.variables['mailer'][new_resource.name] ||= {}
- haproxy_config_resource.variables['mailer'].delete(new_resource.name) if haproxy_config_resource.variables['mailer'].key?(new_resource.name)
-end
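
A usage sketch for the mailer resource above (addresses are illustrative): each element of `mailer` becomes a `mailer` line and `timeout` becomes a `timeout mail` line in the rendered `mailers` section.

```ruby
haproxy_mailer 'alerts' do
  mailer  'smtp1 192.0.2.10:587'
  timeout '20s'
end
```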
diff --git a/lc-gdn-chef/cookbooks/haproxy/resources/partial/_config_file.rb b/lc-gdn-chef/cookbooks/haproxy/resources/partial/_config_file.rb
deleted file mode 100644
index 4a09b51a38db931bbd533187142ddf9f6ddf270b..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/haproxy/resources/partial/_config_file.rb
+++ /dev/null
@@ -1,35 +0,0 @@
-property :user, String,
- default: 'haproxy',
- description: 'Set to override default haproxy user, defaults to haproxy'
-
-property :group, String,
- default: 'haproxy',
- description: 'Set to override default haproxy group, defaults to haproxy'
-
-property :config_dir, String,
- default: '/etc/haproxy',
- desired_state: false,
- description: 'Set to override the haproxy configuration directory'
-
-property :config_dir_mode, String,
- default: '0750',
- description: 'Set to override haproxy config dir mode, defaults to 0750'
-
-property :config_file, String,
- default: lazy { ::File.join(config_dir, 'haproxy.cfg') },
- desired_state: false,
- description: 'Set to override the haproxy configuration file, defaults to {config_dir}/haproxy.cfg'
-
-property :config_file_mode, String,
- default: '0640',
- description: 'Set to override default haproxy config file mode, defaults to 0640'
-
-property :cookbook, String,
- default: 'haproxy',
- desired_state: false,
- description: 'Template source cookbook for the haproxy configuration file'
-
-property :template, String,
- default: 'haproxy.cfg.erb',
- desired_state: false,
- description: 'Template source file for the haproxy configuration file'
diff --git a/lc-gdn-chef/cookbooks/haproxy/resources/partial/_extra_options.rb b/lc-gdn-chef/cookbooks/haproxy/resources/partial/_extra_options.rb
deleted file mode 100644
index abccb58f561ac69619ddef297bf3f681f995e1aa..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/haproxy/resources/partial/_extra_options.rb
+++ /dev/null
@@ -1,2 +0,0 @@
-property :extra_options, Hash,
- description: 'Used for setting any HAProxy directives'
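
The `extra_options` Hash is the catch-all for directives without a dedicated property: keys become directive names, String values render as a single line, and Array values render one line per element. A sketch using the listen resource above (values are illustrative):

```ruby
haproxy_listen 'www' do
  bind '0.0.0.0:80'
  extra_options(
    'log'    => 'global',                 # -> `log global`
    'option' => %w(httplog dontlognull)   # -> one `option ...` line per element
  )
end
```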
diff --git a/lc-gdn-chef/cookbooks/haproxy/resources/peer.rb b/lc-gdn-chef/cookbooks/haproxy/resources/peer.rb
deleted file mode 100644
index c0c27c6b73081962e99b920f4fdf100dd77c6d72..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/haproxy/resources/peer.rb
+++ /dev/null
@@ -1,61 +0,0 @@
-use 'partial/_config_file'
-use 'partial/_extra_options'
-
-property :bind, [String, Hash],
- description: 'String - sets as given. Hash - joins with a space. HAProxy version >= 2.0'
-
-property :state, String,
- equal_to: %w(enabled disabled),
- description: 'Set the state of the peers'
-
-property :server, Array,
- description: 'Servers in the peer'
-
-property :default_bind, String,
- description: 'Defines the binding parameters for the local peer, except its address'
-
-property :default_server, String,
- description: 'Change default options for a server'
-
-property :table, Array,
- description: 'Configure a stickiness table'
-
-unified_mode true
-
-action_class do
- include Haproxy::Cookbook::ResourceHelpers
-end
-
-action :create do
- haproxy_config_resource_init
-
- haproxy_config_resource.variables['peer'] ||= {}
-
- haproxy_config_resource.variables['peer'][new_resource.name] ||= {}
- haproxy_config_resource.variables['peer'][new_resource.name]['bind'] = new_resource.bind if property_is_set?(:bind)
- haproxy_config_resource.variables['peer'][new_resource.name]['state'] = new_resource.state if property_is_set?(:state)
-
- if property_is_set?(:server)
- haproxy_config_resource.variables['peer'][new_resource.name]['server'] ||= []
- haproxy_config_resource.variables['peer'][new_resource.name]['server'].push(new_resource.server)
- end
-
- haproxy_config_resource.variables['peer'][new_resource.name]['default_bind'] = new_resource.default_bind if property_is_set?(:default_bind)
- haproxy_config_resource.variables['peer'][new_resource.name]['default_server'] = new_resource.default_server if property_is_set?(:default_server)
-
- if property_is_set?(:table)
- haproxy_config_resource.variables['peer'][new_resource.name]['table'] ||= []
- haproxy_config_resource.variables['peer'][new_resource.name]['table'].push(new_resource.table)
- end
-
- haproxy_config_resource.variables['peer'][new_resource.name]['extra_options'] = new_resource.extra_options if property_is_set?(:extra_options)
-end
-
-action :delete do
- haproxy_config_resource_init
-
- haproxy_config_resource.variables['peer'] ||= {}
-
- haproxy_config_resource.variables['peer'][new_resource.name] ||= {}
- haproxy_config_resource.variables['peer'].delete(new_resource.name) if haproxy_config_resource.variables['peer'].key?(new_resource.name)
-end
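
A sketch for the peers resource above (addresses are illustrative): `server` and `table` take arrays, and each element becomes its own line in the rendered `peers` section.

```ruby
haproxy_peer 'mypeers' do
  bind   '0.0.0.0:10000'
  server ['haproxy1 192.0.2.1:10000', 'haproxy2 192.0.2.2:10000']
  table  ['mytable type string size 10m store gpc0']
end
```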
diff --git a/lc-gdn-chef/cookbooks/haproxy/resources/resolver.rb b/lc-gdn-chef/cookbooks/haproxy/resources/resolver.rb
deleted file mode 100644
index 7eee0458877f36d39a6040ab0e9bad9e448c3f8f..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/haproxy/resources/resolver.rb
+++ /dev/null
@@ -1,35 +0,0 @@
-use 'partial/_config_file'
-use 'partial/_extra_options'
-
-property :nameserver, Array,
- description: 'DNS nameserver entries for the resolvers section'
-
-unified_mode true
-
-action_class do
- include Haproxy::Cookbook::ResourceHelpers
-end
-
-action :create do
- haproxy_config_resource_init
-
- haproxy_config_resource.variables['resolvers'] ||= {}
-
- haproxy_config_resource.variables['resolvers'][new_resource.name] ||= {}
-
- if property_is_set?(:nameserver)
- haproxy_config_resource.variables['resolvers'][new_resource.name]['nameserver'] ||= []
- haproxy_config_resource.variables['resolvers'][new_resource.name]['nameserver'].push(new_resource.nameserver)
- end
-
- haproxy_config_resource.variables['resolvers'][new_resource.name]['extra_options'] = new_resource.extra_options if property_is_set?(:extra_options)
-end
-
-action :delete do
- haproxy_config_resource_init
-
- haproxy_config_resource.variables['resolvers'] ||= {}
-
- haproxy_config_resource.variables['resolvers'][new_resource.name] ||= {}
- haproxy_config_resource.variables['resolvers'].delete(new_resource.name) if haproxy_config_resource.variables['resolvers'].key?(new_resource.name)
-end
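
A sketch for the resolver resource above: each `nameserver` entry renders as a `nameserver` line inside `resolvers <name>`, and `extra_options` covers directives such as `hold` or `resolve_retries` (values below are illustrative).

```ruby
haproxy_resolver 'dns' do
  nameserver ['ns1 10.0.0.2:53', 'ns2 10.0.0.3:53']
  extra_options('hold' => 'valid 10s', 'resolve_retries' => '3')
end
```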
diff --git a/lc-gdn-chef/cookbooks/haproxy/resources/service.rb b/lc-gdn-chef/cookbooks/haproxy/resources/service.rb
deleted file mode 100644
index 50314f0f4d61970abbbef63a007c2662d346f7cb..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/haproxy/resources/service.rb
+++ /dev/null
@@ -1,91 +0,0 @@
-include Haproxy::Cookbook::Helpers
-
-use 'partial/_config_file'
-
-property :bin_prefix, String,
- default: '/usr',
- description: 'Bin location of the haproxy binary, defaults to /usr'
-
-property :service_name, String,
- default: 'haproxy'
-
-property :systemd_unit_content, [String, Hash],
- default: lazy { default_systemd_unit_content },
- description: 'HAProxy systemd unit contents'
-
-property :config_test, [true, false],
- default: true,
- description: 'Perform configuration file test before performing service action'
-
-property :config_test_fail_action, Symbol,
- equal_to: %i(raise log),
- default: :raise,
- description: 'Action to perform upon configuration test failure.'
-
-unified_mode true
-
-action_class do
- include Haproxy::Cookbook::Helpers
-
- def do_service_action(resource_action)
- with_run_context(:root) do
- if %i(start restart reload).include?(resource_action)
- begin
- if new_resource.config_test && ::File.exist?(new_resource.config_file)
- log 'Running configuration test'
- cmd = Mixlib::ShellOut.new("#{systemd_command(new_resource.bin_prefix)} -c -V -f #{new_resource.config_file}")
- cmd.run_command.error!
- Chef::Log.info("Configuration test passed, creating #{new_resource.service_name} #{new_resource.declared_type} resource with action #{resource_action}")
- elsif new_resource.config_test && !::File.exist?(new_resource.config_file)
- log 'Configuration test is enabled but configuration file does not exist, skipping test' do
- level :warn
- end
- else
- Chef::Log.info("Configuration test disabled, creating #{new_resource.service_name} #{new_resource.declared_type} resource with action #{resource_action}")
- end
-
- declare_resource(:service, new_resource.service_name).delayed_action(resource_action)
- rescue Mixlib::ShellOut::ShellCommandFailed
- if new_resource.config_test_fail_action.eql?(:log)
- Chef::Log.error("Configuration test failed, #{new_resource.service_name} #{resource_action} action aborted!\n\n"\
- "Error\n-----\n#{cmd.stderr}")
- else
- raise "Configuration test failed, #{new_resource.service_name} #{resource_action} action aborted!\n\n"\
- "Error\n-----\nAction: #{resource_action}\n#{cmd.stderr}"
- end
- end
- else
- declare_resource(:service, new_resource.service_name).delayed_action(resource_action)
- end
- end
- end
-end
-
-action :create do
- with_run_context :root do
- declare_resource(:cookbook_file, '/etc/default/haproxy') do
- cookbook 'haproxy'
- source 'haproxy-default'
- owner 'root'
- group 'root'
- mode '0644'
- end
-
- declare_resource(:systemd_unit, "#{new_resource.service_name}.service") do
- content new_resource.systemd_unit_content
- triggers_reload true
- action :create
- end
- end
-end
-
-action :delete do
- with_run_context :root do
- declare_resource(:cookbook_file, '/etc/default/haproxy').action(:delete)
- declare_resource(:systemd_unit, "#{new_resource.service_name}.service").action(:delete)
- end
-end
-
-%i(start stop restart reload enable disable).each do |action_type|
- send(:action, action_type) { do_service_action(action) }
-end
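
The service resource above validates the configuration before start/restart/reload (via the cookbook's helper, roughly `haproxy -c -V -f <config_file>`) and either raises or only logs on failure, depending on `config_test_fail_action`. A sketch:

```ruby
# Validate the config before acting, but only log (not fail the Chef run) on a bad config.
haproxy_service 'haproxy' do
  config_test             true
  config_test_fail_action :log
  action [:enable, :start]
end
```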
diff --git a/lc-gdn-chef/cookbooks/haproxy/resources/use_backend.rb b/lc-gdn-chef/cookbooks/haproxy/resources/use_backend.rb
deleted file mode 100644
index 0464043ed47480edd61713554461bf9c95df88da..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/haproxy/resources/use_backend.rb
+++ /dev/null
@@ -1,43 +0,0 @@
-use 'partial/_config_file'
-
-property :use_backend, [String, Array],
- name_property: true,
- coerce: proc { |p| Array(p) },
- description: 'Switch to a specific backend if/unless an ACL-based condition is matched'
-
-property :section, String,
- required: true,
- equal_to: %w(frontend listen backend),
- description: 'The section where the acl(s) should be applied'
-
-property :section_name, String,
- required: true,
- description: 'The name of the specific frontend, listen or backend section'
-
-unified_mode true
-
-action_class do
- include Haproxy::Cookbook::ResourceHelpers
-end
-
-action :create do
- haproxy_config_resource_init
-
- haproxy_config_resource.variables[new_resource.section] ||= {}
-
- haproxy_config_resource.variables[new_resource.section][new_resource.section_name] ||= {}
-
- haproxy_config_resource.variables[new_resource.section][new_resource.section_name]['use_backend'] ||= []
- haproxy_config_resource.variables[new_resource.section][new_resource.section_name]['use_backend'].push(new_resource.use_backend)
-end
-
-action :delete do
- haproxy_config_resource_init
-
- haproxy_config_resource.variables[new_resource.section] ||= {}
-
- haproxy_config_resource.variables[new_resource.section][new_resource.section_name] ||= {}
-
- haproxy_config_resource.variables[new_resource.section][new_resource.section_name]['use_backend'] ||= []
- haproxy_config_resource.variables[new_resource.section][new_resource.section_name]['use_backend'].delete(new_resource.use_backend)
-end
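
A sketch for the use_backend resource above: the name property is the rule itself, and `section`/`section_name` pick the frontend, listen or backend section it is appended to.

```ruby
haproxy_use_backend 'static if { path_beg /static }' do
  section      'frontend'
  section_name 'www'
end
```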
diff --git a/lc-gdn-chef/cookbooks/haproxy/resources/userlist.rb b/lc-gdn-chef/cookbooks/haproxy/resources/userlist.rb
deleted file mode 100644
index 5e9b2bdbad7df15f5cfb6c171ac017d2f4c22d82..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/haproxy/resources/userlist.rb
+++ /dev/null
@@ -1,35 +0,0 @@
-use 'partial/_config_file'
-
-property :group, Hash,
- description: 'Adds group to the current userlist'
-
-property :user, Hash,
- description: 'Adds user to the current userlist'
-
-unified_mode true
-
-action_class do
- include Haproxy::Cookbook::ResourceHelpers
-end
-
-action :create do
- haproxy_config_resource_init
-
- haproxy_config_resource.variables['userlist'] ||= {}
- haproxy_config_resource.variables['userlist'][new_resource.name] ||= {}
- haproxy_config_resource.variables['userlist'][new_resource.name]['group'] ||= []
- haproxy_config_resource.variables['userlist'][new_resource.name]['group'].push(new_resource.group) if property_is_set?(:group)
- haproxy_config_resource.variables['userlist'][new_resource.name]['user'] ||= []
- haproxy_config_resource.variables['userlist'][new_resource.name]['user'].push(new_resource.user) if property_is_set?(:user)
-end
-
-action :delete do
- haproxy_config_resource_init
-
- haproxy_config_resource.variables['userlist'] ||= {}
- haproxy_config_resource.variables['userlist'][new_resource.name] ||= {}
- haproxy_config_resource.variables['userlist'][new_resource.name]['group'] ||= []
- haproxy_config_resource.variables['userlist'][new_resource.name]['group'].delete(new_resource.group)
- haproxy_config_resource.variables['userlist'][new_resource.name]['user'] ||= []
- haproxy_config_resource.variables['userlist'][new_resource.name]['user'].delete(new_resource.user)
-end
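
A sketch for the userlist resource above: `group` and `user` are Hashes, and the template renders each pair as `<type> <key> <value>` under `userlist <name>` (credentials below are placeholders).

```ruby
haproxy_userlist 'ops' do
  group('admins' => 'users alice,bob')
  user('alice'   => 'insecure-password s3cr3t groups admins')
end
```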
diff --git a/lc-gdn-chef/cookbooks/haproxy/templates/default/haproxy.cfg.erb b/lc-gdn-chef/cookbooks/haproxy/templates/default/haproxy.cfg.erb
deleted file mode 100644
index 8744e2a2a18dc596bcb945cf7d9f22e865775731..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/haproxy/templates/default/haproxy.cfg.erb
+++ /dev/null
@@ -1,462 +0,0 @@
-<% unless nil_or_empty?(@global) %>
-global
- user <%= @global['user'] %>
- group <%= @global['group'] %>
-<% if @global['pid_file'] -%>
- pidfile <%= @global['pid_file'] %>
-<% end %>
-<% if @global['log'] && @global['log'].is_a?(Array)-%>
-<% @global['log'].each do | logvar | -%>
-<% if logvar && logvar.is_a?(Array)-%>
-<% logvar.each do | log | -%>
- log <%= log %>
-<% end -%>
-<% else -%>
- log <%= logvar %>
-<% end -%>
-<% end -%>
-<% end -%>
-<% if @global['log_tag'] -%>
- log-tag <%= @global['log_tag'] %>
-<% end -%>
-<% if @global['chroot'] -%>
- chroot <%= @global['chroot'] %>
-<% end -%>
-<% if @global['daemon'].eql? 'true' -%>
- daemon
-<% end -%>
- <%= @global['debug_option'] %>
-<% @global['stats']&.each do |option, value| -%>
- stats <%= option %> <%= value %>
-<% end -%>
-<% unless nil_or_empty?(@global['maxconn']) -%>
- maxconn <%= @global['maxconn'] %>
-<% end -%>
-<% if @global['pidfile'] -%>
- pidfile <%= @global['pidfile'] %>
-<% end -%>
-<% unless nil_or_empty?(@global['tuning']) %>
-<% @global['tuning'].each do | option, value | -%>
- tune.<%= option %> <%= value %>
-<% end -%>
-<% end -%>
-<% unless nil_or_empty?(@global['extra_options']) -%>
-<% @global['extra_options'].each do | option, value | -%>
-<% if value.is_a?(Array) %>
-<% value.each do | array_element | %>
- <%= option %> <%= array_element %>
-<% end -%>
-<% else %>
- <%= option %> <%= value %>
-<% end -%>
-<% end -%>
-<% end -%>
-<% end -%>
-<% unless nil_or_empty?(@resolvers) %>
-<% @resolvers.each do |resolver, r | %>
-
-
-resolvers <%= resolver %>
-<% unless nil_or_empty?(r['nameserver']) %>
-<% r['nameserver'].each do | s |%>
-<% s.each do |nameserver|%>
- nameserver <%= nameserver %>
-<% end -%>
-<% end -%>
-<% end -%>
-<% unless nil_or_empty?(r['extra_options']) %>
-<% r['extra_options'].each do | option, value | -%>
-<% if value.is_a?(Array) %>
-<% value.each do | array_element | %>
- <%= option %> <%= array_element %>
-<% end -%>
-<% else %>
- <%= option %> <%= value %>
-<% end -%>
-<% end -%>
-<% end -%>
-<% end -%>
-<% end -%>
-<% unless nil_or_empty?(@defaults) %>
-
-
-defaults
-<% @defaults['timeout'].each do | key, timeout | -%>
- timeout <%= key %> <%= timeout %>
-<% end -%>
- log <%= @defaults['log'] %>
- mode <%= @defaults['mode'] %>
-<% unless nil_or_empty?(@defaults['balance']) -%>
- balance <%= @defaults['balance'] %>
-<% end -%>
-<% if @defaults['hash_type'] -%>
- hash-type <%= @defaults['hash_type'] %>
-<% end -%>
-<% @defaults['option'].each do | option | -%>
- option <%= option %>
-<% end -%>
-<% unless nil_or_empty?(@defaults['maxconn']) -%>
- maxconn <%= @defaults['maxconn'] %>
-<% end -%>
-<% unless nil_or_empty?(@defaults['retries']) -%>
- retries <%= @defaults['retries'] %>
-<% end -%>
-<% @defaults['stats']&.each do |option, value| -%>
- stats <%= option %> <%= value %>
-<% end -%>
-<% unless nil_or_empty?(@defaults['extra_options']) %>
-<% @defaults['extra_options'].each do | option, value | -%>
-<% if value.is_a?(Array) %>
-<% value.each do | array_element | %>
- <%= option %> <%= array_element %>
-<% end -%>
-<% else %>
- <%= option %> <%= value %>
-<% end -%>
-<% end -%>
-<% end -%>
-<% end -%>
-<% unless nil_or_empty?(@cache) %>
-<% @cache.each do |cache, c | %>
-
-
-cache <%= cache %>
-<% unless nil_or_empty?(c['total_max_size']) -%>
- total-max-size <%= c['total_max_size'] %>
-<% end -%>
-<% unless nil_or_empty?(c['max_object_size']) %>
- max-object-size <%= c['max_object_size'] %>
-<% end -%>
-<% unless nil_or_empty?(c['max_age']) %>
- max-age <%= c['max_age'] %>
-<% end -%>
-<% end # cache loop -%>
-<% end # cache -%>
-<% unless nil_or_empty?(@userlist) %>
-<% @userlist.each do |userlist, u| %>
-
-
-userlist <%= userlist %>
-<% u.each do |type,i| -%>
-<% i.each do |item| -%>
-<% item.each do |k,v| %>
- <%= type %> <%=k%> <%= v %>
-<% end -%>
-<% end -%>
-<% end -%>
-<% end -%>
-<% end -%>
-<% unless nil_or_empty?(@peer) %>
-<% @peer.each do |peer, pv | %>
-
-peers <%= peer %>
-<% unless nil_or_empty?(pv['bind']) %>
-<% if pv['bind'].is_a?(Hash) -%>
-<% pv['bind'].each do |k, v| -%>
- bind <%= "#{k} #{v}".strip %>
-<% end -%>
-<% else -%>
- bind <%= pv['bind'] %>
-<% end -%>
-<% end -%>
-<% unless nil_or_empty?(pv['state']) %>
- <%= pv['state'] %>
-<% end -%>
-<% unless nil_or_empty?(pv['server']) %>
-<% pv['server'].each do | s |%>
-<% s.each do |server|%>
- server <%= server %>
-<% end -%>
-<% end -%>
-<% end -%>
-<% unless nil_or_empty?(pv['default_bind']) %>
- default-bind <%= pv['default_bind'] %>
-<% end -%>
-<% unless nil_or_empty?(pv['default_server']) %>
- default-server <%= pv['default_server'] %>
-<% end -%>
-<% unless nil_or_empty?(pv['table']) %>
-<% pv['table'].each do | t |%>
-<% t.each do |table|%>
- table <%= table %>
-<% end -%>
-<% end -%>
-<% end -%>
-<% unless nil_or_empty?(pv['extra_options']) %>
-<% pv['extra_options'].each do | key, value |%>
-<% if value.is_a?(Array) %>
-<% value.each do | array_element | %>
- <%= key %> <%= array_element %>
-<% end -%>
-<% else %>
- <%= key %> <%= value %>
-<% end -%>
-<% end -%>
-<% end -%>
-<% end # peers loop -%>
-<% end # peers -%>
-<% unless nil_or_empty?(@mailer) %>
-<% @mailer.each do |mailer, m | %>
-
-mailers <%= mailer %>
-<% unless nil_or_empty?(m['mailer']) -%>
-<% m['mailer'].each do |mail| -%>
- mailer <%= mail %>
-<% end -%>
-<% end -%>
-<% unless nil_or_empty?(m['timeout']) %>
- timeout mail <%= m['timeout'] %>
-<% end -%>
-<% end # mailers loop -%>
-<% end # mailers -%>
-<% unless nil_or_empty?(@fastcgi) %>
-<% @fastcgi.each do |fastcgi, f | %>
-
-
-fcgi-app <%= fastcgi %>
-<% unless nil_or_empty?(f['docroot']) %>
- docroot <%= f['docroot'] %>
-<% end -%>
-<% unless nil_or_empty?(f['index']) %>
- index <%= f['index'] %>
-<% end -%>
-<% unless nil_or_empty?(f['log_stderr']) %>
- log-stderr <%= f['log_stderr'] %>
-<% end -%>
-<% unless nil_or_empty?(f['option']) %>
-<% f['option'].each do | option |%>
-<% option.each do | option | %>
- option <%= option %>
-<% end -%>
-<% end -%>
-<% end -%>
-<% unless nil_or_empty?(f['extra_options']) %>
-<% f['extra_options'].each do | key, value | %>
-<% unless key == 'http-request' %>
-<% if value.is_a?(Array) %>
-<% value.each do | array_element | %>
- <%= key %> <%= array_element %>
-<% end -%>
-<% else %>
- <%= key %> <%= value %>
-<% end -%>
-<% end -%>
-<% end -%>
-<% end -%>
-<% end # fastcgi loop -%>
-<% end # fastcgi -%>
-<% unless nil_or_empty?(@frontend) %>
-<% @frontend.each do |frontend, f | %>
-
-
-frontend <%= frontend %>
-<% unless nil_or_empty?(f['mode']) -%>
- mode <%= f['mode'] %>
-<% end -%>
-<% unless nil_or_empty?(f['default_backend']) %>
- default_backend <%= f['default_backend'] %>
-<% end -%>
-<% f['bind'].each do |binding| -%>
- bind <%= binding %>
-<% end -%>
-<% unless nil_or_empty?(f['maxconn']) -%>
- maxconn <%= f['maxconn'] %>
-<% end -%>
-<% f['stats']&.each do |option, value| -%>
- stats <%= option %> <%= value %>
-<% end -%>
-<% unless nil_or_empty?(f['acl']) %>
-<% f['acl'].flatten.uniq.each do | acl |%>
- acl <%= acl %>
-<% end -%>
-<% end -%>
-<% if f['reqrep'] -%>
-<% f['reqrep'].each do |reqrep| %>
- reqrep <%= reqrep %>
-<% end -%>
-<% end %>
-<% if f['reqirep'] -%>
-<% f['reqirep'].each do |reqirep| %>
- reqirep <%= reqirep %>
-<% end -%>
-<% end %>
-<% unless nil_or_empty?(f['extra_options']) %>
-<% f['extra_options'].each do | key, value | %>
-<% if key == 'http-request' %>
-<% if value.is_a?(Array) %>
-<% value.each do | array_element | %>
- <%= key %> <%= array_element %>
-<% end -%>
-<% else %>
- <%= key %> <%= value %>
-<% end -%>
-<% end -%>
-<% end -%>
-<% end -%>
-<% unless nil_or_empty?(f['use_backend']) %>
-<% f['use_backend'].flatten.uniq.each do | backend |%>
- use_backend <%= backend %>
-<% end -%>
-<% end -%>
-<% unless nil_or_empty?(f['option']) %>
-<% f['option'].each do | option |%>
-<% option.each do | option | %>
- option <%= option %>
-<% end -%>
-<% end -%>
-<% end -%>
-<% unless nil_or_empty?(f['extra_options']) %>
-<% f['extra_options'].each do | key, value | %>
-<% unless key == 'http-request' %>
-<% if value.is_a?(Array) %>
-<% value.each do | array_element | %>
- <%= key %> <%= array_element %>
-<% end -%>
-<% else %>
- <%= key %> <%= value %>
-<% end -%>
-<% end -%>
-<% end -%>
-<% end -%>
-<% end # frontend loop -%>
-<% end # frontend -%>
-<% unless nil_or_empty?(@backend) %>
-<% @backend.each do | key, backend | %>
-
-
-backend <%= key %>
-<% unless nil_or_empty?(backend['mode']) -%>
- mode <%= backend['mode'] %>
-<% end -%>
-<% unless nil_or_empty?(backend['server']) %>
-<% backend['server'].each do | s |%>
-<% s.each do |server|%>
- server <%= server %>
-<% end -%>
-<% end -%>
-<% end -%>
-<% unless nil_or_empty?(backend['acl']) %>
-<% backend['acl'].flatten.uniq.each do | acl |%>
- acl <%= acl %>
-<% end -%>
-<% end -%>
-<% unless nil_or_empty?(backend['option']) %>
-<% backend['option'].each do | option |%>
-<% option.each do | option | %>
- option <%= option %>
-<% end -%>
-<% end -%>
-<% end -%>
-<% unless nil_or_empty?(backend['tcp_request']) %>
-<% backend['tcp_request'].each do | tcp_request |%>
-<% tcp_request.each do | tcp_request | %>
- tcp-request <%= tcp_request %>
-<% end -%>
-<% end -%>
-<% end -%>
-<% if backend['hash_type'] -%>
- hash-type <%= backend['hash_type'] %>
-<% end -%>
-<% if backend['reqrep'] -%>
-<% backend['reqrep'].each do |reqrep| %>
- reqrep <%= reqrep %>
-<% end -%>
-<% end %>
-<% if backend['reqirep'] -%>
-<% backend['reqirep'].each do |reqirep| %>
- reqirep <%= reqirep %>
-<% end -%>
-<% end %>
-<% unless nil_or_empty?(backend['extra_options']) %>
-<% backend['extra_options'].each do | key, value |%>
-<% if value.is_a?(Array) %>
-<% value.each do | array_element | %>
- <%= key %> <%= array_element %>
-<% end -%>
-<% else %>
- <%= key %> <%= value %>
-<% end -%>
-<% end -%>
-<% end -%>
-<% end # backend loop -%>
-<% end # backend -%>
-<% unless nil_or_empty?(@listen) %>
-<% @listen.each do | key, listen |%>
-
-
-listen <%= key %>
-<% unless nil_or_empty?(listen['mode']) -%>
- mode <%= listen['mode']%>
-<% end -%>
-<% listen['bind'].each do |binding| -%>
- bind <%= binding %>
-<% end -%>
-<% unless nil_or_empty?(listen['maxconn']) -%>
- maxconn <%= listen['maxconn']%>
-<% end -%>
-<% listen['stats']&.each do |option, value| -%>
- stats <%= option %> <%= value %>
-<% end -%>
-<% unless nil_or_empty?(listen['acl']) %>
-<% listen['acl'].flatten.uniq.each do | acl |%>
- acl <%= acl %>
-<% end -%>
-<% end -%>
-<% if listen['http_request'] -%>
-<% listen['http_request'].each do |http_request| %>
- http-request <%= http_request %>
-<% end -%>
-<% end %>
-<% if listen['http_response'] -%>
- http-response <%= listen['http_response'] %>
-<% end %>
-<% if listen['reqrep'] -%>
-<% listen['reqrep'].each do |reqrep| %>
- reqrep <%= reqrep %>
-<% end -%>
-<% end %>
-<% if listen['reqirep'] -%>
-<% listen['reqirep'].each do |reqirep| %>
- reqirep <%= reqirep %>
-<% end -%>
-<% end %>
-<% if listen['default_backend'] -%>
- default_backend <%= listen['default_backend'] %>
-<% end %>
-<% unless nil_or_empty?(listen['extra_options']) %>
-<% listen['extra_options'].each do | key, value | %>
-<% if key == 'http-request' %>
-<% if value.is_a?(Array) %>
-<% value.each do | array_element | %>
- <%= key %> <%= array_element %>
-<% end -%>
-<% else %>
- <%= key %> <%= value %>
-<% end -%>
-<% end -%>
-<% end -%>
-<% end -%>
-<% unless nil_or_empty?(listen['use_backend']) %>
-<% listen['use_backend'].flatten.uniq.each do | backend |%>
- use_backend <%= backend %>
-<% end -%>
-<% end -%>
-<% unless nil_or_empty?(listen['extra_options']) %>
-<% listen['extra_options'].each do | key, value | %>
-<% unless key == 'http-request' %>
-<% if value.is_a?(Array) %>
-<% value.each do | array_element | %>
- <%= key %> <%= array_element %>
-<% end -%>
-<% else %>
- <%= key %> <%= value %>
-<% end -%>
-<% end -%>
-<% end -%>
-<% end -%>
-<% listen['server']&.each do | s |%>
-<% s.each do |server|%>
- server <%= server %>
-<% end -%>
-<% end -%>
-<% if listen['hash_type'] -%>
- hash-type <%= listen['hash_type'] %>
-<% end -%>
-<% end # listen loop -%>
-<% end # listen -%>
diff --git a/lc-gdn-chef/cookbooks/hbase_cluster/README.md b/lc-gdn-chef/cookbooks/hbase_cluster/README.md
deleted file mode 100644
index 7f7b9192be5fbff022a30c71123d5bd10d01977c..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/hbase_cluster/README.md
+++ /dev/null
@@ -1,272 +0,0 @@
-# hbase chef cookbook
-
-HBase: a massively scalable, high-throughput datastore based on Hadoop HDFS
-
-## Overview
-
-Installs/Configures HBase
-
-## Recipes
-
-* `backup_tables` - Cron job to backup tables to S3
-* `config` - Finalizes the config, writes out the config files
-* `dashboard` - Simple dashboard for HBase config and state
-* `default` - Base configuration for hbase
-* `master` - HBase Master
-* `regionserver` - HBase Regionserver
-* `stargate` - HBase Stargate: HTTP frontend to HBase
-* `thrift` - HBase Thrift Listener
-
-## Integration
-
-Supports platforms: debian and ubuntu
-
-Cookbook dependencies:
-* java
-* apt
-* runit
-* volumes
-* metachef
-* dashpot
-* hadoop_cluster
-* zookeeper
-* ganglia
-
-
-## Attributes
-
-* `[:groups][:hbase][:gid]` - (default: "304")
-* `[:hbase][:tmp_dir]` - (default: "/mnt/hbase/tmp")
-* `[:hbase][:home_dir]` - (default: "/usr/lib/hbase")
-* `[:hbase][:conf_dir]` - (default: "/etc/hbase/conf")
-* `[:hbase][:log_dir]` - (default: "/var/log/hbase")
-* `[:hbase][:pid_dir]` - (default: "/var/run/hbase")
-* `[:hbase][:weekly_backup_tables]` -
-* `[:hbase][:backup_location]` - (default: "/mnt/hbase/bkup")
-* `[:hbase][:master][:java_heap_size_max]` - (default: "1000m")
- - total size of the JVM heap (master)
-* `[:hbase][:master][:java_heap_size_new]` - (default: "256m")
- - size of the JVM "New Generation/Eden" heap segment (master)
-* `[:hbase][:master][:gc_tuning_opts]` - (default: "-XX:+UseConcMarkSweepGC -XX:+CMSIncrementalMode -XX:+AggressiveOpts")
- - JVM garbage collection tuning for the hbase master
-* `[:hbase][:master][:gc_log_opts]` - (default: "-verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps")
- - What details to log about JVM garbage collection statistics for the hbase master
-* `[:hbase][:master][:run_state]` - (default: "start")
-* `[:hbase][:master][:port]` - (default: "60000")
-* `[:hbase][:master][:dash_port]` - (default: "60010")
-* `[:hbase][:master][:jmx_dash_port]` - (default: "10101")
-* `[:hbase][:regionserver][:java_heap_size_max]` - (default: "2000m")
- - total size of the JVM heap (regionserver)
-* `[:hbase][:regionserver][:java_heap_size_new]` - (default: "256m")
- - size of the JVM "New Generation/Eden" heap segment (regionserver)
-* `[:hbase][:regionserver][:gc_tuning_opts]` - (default: "-XX:+UseConcMarkSweepGC -XX:+CMSIncrementalMode -XX:+AggressiveOpts -XX:CMSInitiatingOccupancyFraction=88")
- - JVM garbage collection tuning for the hbase regionserver
-* `[:hbase][:regionserver][:gc_log_opts]` - (default: "-verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps")
- - What details to log about JVM garbage collection statistics for the hbase regionserver
-* `[:hbase][:regionserver][:run_state]` - (default: "start")
-* `[:hbase][:regionserver][:port]` - (default: "60020")
-* `[:hbase][:regionserver][:dash_port]` - (default: "60030")
-* `[:hbase][:regionserver][:jmx_dash_port]` - (default: "10102")
-* `[:hbase][:regionserver][:lease_period]` - (default: "60000")
- - hbase.regionserver.lease.period (default 60000) -- HRegion server lease period in
- milliseconds. Default is 60 seconds. Clients must report in within this period else
- they are considered dead.
-* `[:hbase][:regionserver][:handler_count]` - (default: "10")
- - hbase.regionserver.handler.count (default 10) -- Count of RPC Server instances spun up
- on RegionServers. Same property is used by the Master for count of master handlers.
-* `[:hbase][:regionserver][:split_limit]` - (default: "2147483647")
- - hbase.regionserver.regionSplitLimit (default 2147483647) -- Limit for the number of
- regions after which no more region splitting should take place. This is not a hard
- limit for the number of regions but acts as a guideline for the regionserver to stop
- splitting after a certain limit. Default is set to MAX_INT; i.e. do not block
- splitting.
-* `[:hbase][:regionserver][:msg_period]` - (default: "3000")
- - hbase.regionserver.msginterval (default 3000) -- Interval between messages from the
- RegionServer to Master in milliseconds.
-* `[:hbase][:regionserver][:log_flush_period]` - (default: "1000")
- - hbase.regionserver.optionallogflushinterval (default 1000) -- Sync the HLog to the HDFS
- after this interval if it has not accumulated enough entries to trigger a
- sync. Default 1 second. Units: milliseconds.
-* `[:hbase][:regionserver][:logroll_period]` - (default: "3600000")
- - hbase.regionserver.logroll.period (default 3600000) -- Period at which we will roll the
- commit log regardless of how many edits it has.
-* `[:hbase][:regionserver][:split_check_period]` - (default: "20000")
- - hbase.regionserver.thread.splitcompactcheckfrequency (default 20000) -- How often a
- region server runs the split/compaction check.
-* `[:hbase][:regionserver][:worker_period]` - (default: "10000")
- - hbase.server.thread.wakefrequency (default 10_000) -- Time to sleep in between searches
- for work (in milliseconds). Used as sleep interval by service threads such as log roller.
-* `[:hbase][:regionserver][:balancer_period]` - (default: "300000")
- - hbase.balancer.period (default 300000) -- Period at which the region balancer runs in
- the Master.
-* `[:hbase][:regionserver][:balancer_slop]` - (default: "0")
- - hbase.regions.slop (default 0) -- Rebalance if any regionserver has more than
- average + (average * slop) regions
-* `[:hbase][:regionserver][:max_filesize]` - (default: "268435456")
- - hbase.hregion.max.filesize (default 268435456) -- Maximum HStoreFile size. If any one of
- a column families' HStoreFiles has grown to exceed this value, the hosting HRegion is
- split in two.
-* `[:hbase][:regionserver][:hfile_block_size]` - (default: "65536")
- - hbase.mapreduce.hfileoutputformat.blocksize (default 65536) -- The mapreduce
- HFileOutputFormat writes storefiles/hfiles. This is the minimum hfile blocksize to
- emit. Usually in hbase, writing hfiles, the blocksize is taken from the table schema
- (HColumnDescriptor), but in the mapreduce outputformat context we don't have access to
- the schema, so the blocksize is read from the Configuration. The smaller you make the blocksize,
- the bigger your index and the less you fetch on a random-access. Set the blocksize
- down if you have small cells and want faster random-access of individual cells.
-* `[:hbase][:regionserver][:required_codecs]` -
- - hbase.regionserver.codecs (default "") -- To have a RegionServer test a set of
- codecs and fail to start if any codec is missing or misinstalled, add the
- configuration hbase.regionserver.codecs to your hbase-site.xml with a value of
- codecs to test on startup. For example if the hbase.regionserver.codecs value
- is "lzo,gz" and if lzo is not present or improperly installed, the misconfigured
- RegionServer will fail to start.
-* `[:hbase][:regionserver][:block_cache_size]` - (default: "0.2")
- - hfile.block.cache.size (default 0.2) -- Percentage of maximum heap (-Xmx setting) to
- allocate to block cache used by HFile/StoreFile. Default of 0.2 means allocate 20%.
- Set to 0 to disable.
-* `[:hbase][:regionserver][:hash_type]` - (default: "murmur")
- - hbase.hash.type (default murmur) -- The hashing algorithm for use in HashFunction. Two
- values are supported now: murmur (MurmurHash) and jenkins (JenkinsHash). Used by
- bloom filters.
-* `[:hbase][:stargate][:run_state]` - (default: "start")
-* `[:hbase][:stargate][:port]` - (default: "8080")
-* `[:hbase][:stargate][:jmx_dash_port]` - (default: "10105")
-* `[:hbase][:stargate][:readonly]` -
- - hbase.rest.readonly (default false) -- Defines the mode the REST server will be started
- in. Possible values are: false: All HTTP methods are permitted - GET/PUT/POST/DELETE.
- true: Only the GET method is permitted.
-* `[:hbase][:thrift][:run_state]` - (default: "start")
-* `[:hbase][:thrift][:jmx_dash_port]` - (default: "10104")
-* `[:hbase][:zookeeper][:jmx_dash_port]` - (default: "10103")
-* `[:hbase][:zookeeper][:peer_port]` - (default: "2888")
-* `[:hbase][:zookeeper][:leader_port]` - (default: "3888")
-* `[:hbase][:zookeeper][:client_port]` - (default: "2181")
-* `[:hbase][:zookeeper][:session_timeout]` - (default: "180000")
- - zookeeper.session.timeout (default 180_000) -- ZooKeeper session timeout. HBase passes
- this to the zk quorum as suggested maximum time for a session. See
- http://hadoop.apache.org/zookeeper/docs/current/zookeeperProgrammers.html#ch_zkSessions
- "The client sends a requested timeout, the server responds with the timeout that it
- can give the client. " In milliseconds.
-* `[:hbase][:zookeeper][:znode_parent]` - (default: "/hbase")
- - zookeeper.znode.parent (default "/hbase") -- Root ZNode for HBase in
- ZooKeeper. All of HBase's ZooKeeper files that are configured with a
- relative path will go under this node. By default, all of HBase's ZooKeeper
- file path are configured with a relative path, so they will all go under
- this directory unless changed.
-* `[:hbase][:zookeeper][:znode_rootserver]` - (default: "root-region-server")
- - zookeeper.znode.rootserver (default root-region-server) -- Path to ZNode
- holding root region location. This is written by the master and read by
- clients and region servers. If a relative path is given, the parent folder
- will be ${zookeeper.znode.parent}. By default, this means the root location
- is stored at /hbase/root-region-server.
-* `[:hbase][:zookeeper][:max_client_connections]` - (default: "2000")
- - hbase.zookeeper.property.maxClientCnxns (default 2000) -- Limit on number of concurrent
- connections (at the socket level) that a single client, identified by IP address, may
- make to a single member of the ZooKeeper ensemble. Set high to avoid zk connection
- issues running standalone and pseudo-distributed.
-* `[:hbase][:client][:write_buffer]` - (default: "2097152")
- - hbase.client.write.buffer (default 2097152) Default size of the HTable client write
- buffer in bytes. A bigger buffer takes more memory -- on both the client and server
- side since server instantiates the passed write buffer to process it -- but a larger
- buffer size reduces the number of RPCs made. For an estimate of server-side
- memory-used, evaluate
- hbase.client.write.buffer * hbase.regionserver.handler.count
-* `[:hbase][:client][:pause_period_ms]` - (default: "1000")
- - hbase.client.pause (default 1000) -- General client pause value. Used mostly as value
- to wait before running a retry of a failed get, region lookup, etc.
-* `[:hbase][:client][:retry_count]` - (default: "10")
- - hbase.client.retries.number (default 10) -- Maximum retries. Used as maximum for all
- retryable operations such as fetching of the root region from root region server,
- getting a cell's value, starting a row update, etc.
-* `[:hbase][:client][:scanner_prefetch_rows]` - (default: "1")
- - hbase.client.scanner.caching (default 1) -- Number of rows that will be fetched when
- calling next on a scanner if it is not served from (local, client) memory. Higher
- caching values will enable faster scanners but will eat up more memory and some calls
- of next may take longer and longer times when the cache is empty. Do not set this
- value such that the time between invocations is greater than the scanner timeout;
- i.e. hbase.regionserver.lease.period
-* `[:hbase][:client][:max_keyvalue_size]` - (default: "10485760")
- - hbase.client.keyvalue.maxsize (default 10485760) -- Specifies the combined maximum
- allowed size of a KeyValue instance. This is to set an upper boundary for a single
- entry saved in a storage file. Since KeyValues cannot be split, this helps avoid a
- region becoming unsplittable because a single entry is too large. It seems wise to set
- this to a fraction of the maximum region size. Setting it to zero or less disables the
- check.
-* `[:hbase][:memstore][:flush_upper_heap_pct]` - (default: "0.4")
- - hbase.regionserver.global.memstore.upperLimit (default 0.4) -- Maximum size of all
- memstores in a region server before new updates are blocked and flushes are
- forced. Defaults to 40% of heap
-* `[:hbase][:memstore][:flush_lower_heap_pct]` - (default: "0.35")
- - hbase.regionserver.global.memstore.lowerLimit (default 0.35) -- When memstores are being
- forced to flush to make room in memory, keep flushing until we hit this mark. Defaults
- to 35% of heap. This value equal to hbase.regionserver.global.memstore.upperLimit
- causes the minimum possible flushing to occur when updates are blocked due to memstore
- limiting.
-* `[:hbase][:memstore][:flush_size_trigger]` - (default: "67108864")
- - hbase.hregion.memstore.flush.size (default 67108864) -- Memstore will be flushed to disk
- if size of the memstore exceeds this number of bytes. Value is checked by a thread
- that runs every hbase.server.thread.wakefrequency.
-* `[:hbase][:memstore][:preflush_trigger]` - (default: "5242880")
- - hbase.hregion.preclose.flush.size (default 5 mb) -- If the memstores in a region are
- this size or larger when we go to close, run a "pre-flush" to clear out memstores
- before we put up the region closed flag and take the region offline. On close, a
- flush is run under the close flag to empty memory. During this time the region is
- offline and we are not taking on any writes. If the memstore content is large, this
- flush could take a long time to complete. The preflush is meant to clean out the bulk
- of the memstore before putting up the close flag and taking the region offline so the
- flush that runs under the close flag has little to do.
-* `[:hbase][:memstore][:flush_stall_trigger]` - (default: "8")
- - hbase.hregion.memstore.block.multiplier (default 2) -- Block updates if memstore has
- hbase.hregion.block.memstore times hbase.hregion.flush.size bytes. Useful for preventing
- runaway memstore during spikes in update traffic. Without an upper-bound, memstore
- fills such that when it flushes the resultant flush files take a long time to compact
- or split, or worse, we OOME.
-* `[:hbase][:memstore][:mslab_enabled]` -
- - hbase.hregion.memstore.mslab.enabled (default false) -- Experimental: Enables the
- MemStore-Local Allocation Buffer, a feature which works to prevent heap fragmentation
- under heavy write loads. This can reduce the frequency of stop-the-world GC pauses on
- large heaps.
-* `[:hbase][:compaction][:files_trigger]` - (default: "3")
- - hbase.hstore.compactionThreshold (default 3) -- If more than this number of HStoreFiles
- in any one HStore (one HStoreFile is written per flush of memstore) then a compaction
- is run to rewrite all HStoreFiles files as one. Larger numbers put off compaction but
- when it runs, it takes longer to complete.
-* `[:hbase][:compaction][:pause_trigger]` - (default: "7")
- - hbase.hstore.blockingStoreFiles (default 7) -- If more than this number of StoreFiles in
- any one Store (one StoreFile is written per flush of MemStore) then updates are
- blocked for this HRegion until a compaction is completed, or until
- hbase.hstore.blockingWaitTime has been exceeded.
-* `[:hbase][:compaction][:pause_time]` - (default: "90000")
- - hbase.hstore.blockingWaitTime (default 90_000) -- The time an HRegion will block updates
- for after hitting the StoreFile limit defined by hbase.hstore.blockingStoreFiles.
- After this time has elapsed, the HRegion will stop blocking updates even if a
- compaction has not been completed. Default: 90 seconds.
-* `[:hbase][:compaction][:max_combine_files]` - (default: "10")
- - hbase.hstore.compaction.max (default 10) -- Max number of HStoreFiles to compact per
- 'minor' compaction.
-* `[:hbase][:compaction][:period]` - (default: "86400000")
- - hbase.hregion.majorcompaction (default 86400000) -- The time (in miliseconds) between
- 'major' compactions of all HStoreFiles in a region. Default: 1 day. Set to 0 to
- disable automated major compactions.
-* `[:users][:hbase][:uid]` - (default: "304")
-* `[:tuning][:ulimit][:hbase]` -
-
-## License and Author
-
-Author:: Chris Howe - Infochimps, Inc ()
-Copyright:: 2011, Chris Howe - Infochimps, Inc
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-> readme generated by [cluster_chef](http://github.com/infochimps/cluster_chef)'s cookbook_munger
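
Since everything listed above is a plain node attribute, heap sizes, ports and run states can be overridden from a role or environment rather than by editing the cookbook. A hypothetical role snippet (attribute paths come from the list above; values are examples only, not recommendations):

```ruby
override_attributes(
  hbase: {
    regionserver: { java_heap_size_max: '8000m' },
    client:       { scanner_prefetch_rows: 100 },
    stargate:     { run_state: 'stop' }
  }
)
```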
diff --git a/lc-gdn-chef/cookbooks/hbase_cluster/TODO b/lc-gdn-chef/cookbooks/hbase_cluster/TODO
deleted file mode 100644
index 201270a906a974d46b2d6eed4fe2b7cc1abcf9d9..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/hbase_cluster/TODO
+++ /dev/null
@@ -1,64 +0,0 @@
-
-* (DONE) zookeeper data => /data/db/zookeeper/
-* chown zookeeper:zookeeper /var/zookeeper
-* zookeeper txlog => ??somewhere??
-* (DONE) hbase site.xml => fix the hdfs to be a hostname not IP
-
-
-Mozilla's Socorro project uses HBase as the primary data store for crash reports. The current production cluster is 17+2 nodes. We are moving the app to a new datacenter in the next two months and will be spinning up a 30 node cluster there.
-
-We are currently running 0.20.6, but are striving to upgrade to .89 quickly to pick up some important fixes for issues we are hitting.
-
-We have 17k regions. We store about 2.5M crash reports per day and process 10% of them which are available on the live http://crash-stats.mozilla.com/ site.
-
-Our servers are dual quad cores with 24GB of RAM and 3 1.5TB disks.
-
-17k regions
-2.5M docs/day
-
-17 * 8 cores / 24GB / 4.5TB
-
-
-http://hbase.markmail.org/thread/sp5p7yc73l3kjhuk
-
-
-
-
----------------------------------------------------------------------------
-
-
-flying monkey => hbase
-* flume?
-* rest?
-
-hbase write into table - while read from other tables?
-hbase LZO
-
-
- Java --N--> HBase
- Java --∞--> HBase
- HBase --1--> Ruby
-
- HBase --N--> Pig
- Ruby --1--> HBase
- Pig --N--> Hbase
-
-
-twitter datasets loaded
-apeyeye datasets loaded
-apeyeye handler
-hbase => trstrank
-
-schemas
-- size cluster in regions? blocks? NN ram? hb master ram? rs ram?
-
-monitoring
-extra cluster roles
-lzo?
-
-nn metadata safety
-ebs volumes
-
-compute vs production clusters -- how? S3?
-
-
diff --git a/lc-gdn-chef/cookbooks/hbase_cluster/attributes/default.rb b/lc-gdn-chef/cookbooks/hbase_cluster/attributes/default.rb
deleted file mode 100644
index 40489571d9d5a5bc74b75d09d4db0747008eed21..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/hbase_cluster/attributes/default.rb
+++ /dev/null
@@ -1,78 +0,0 @@
-#
-# See also hbase/attributes/tunables.rb
-#
-
-#
-# Locations
-#
-
-default[:hbase][:home_dir] = '/usr/lib/hbase'
-default[:hbase][:conf_dir] = '/etc/hbase/conf'
-default[:hbase][:pid_dir] = '/var/run/hbase'
-
-default[:hbase][:log_dir] = '/var/log/hbase'
-default[:hbase][:tmp_dir] = '/mnt/hbase/tmp'
-
-default[:hbase][:master ][:port] = 60000
-default[:hbase][:regionserver][:port] = 60020
-default[:hbase][:stargate ][:port] = 8080
-
-default[:hbase][:master ][:dash_port] = 60010
-default[:hbase][:regionserver][:dash_port] = 60030
-
-default[:hbase][:master ][:jmx_dash_port] = 10101
-default[:hbase][:regionserver][:jmx_dash_port] = 10102
-default[:hbase][:zookeeper ][:jmx_dash_port] = 10103
-default[:hbase][:thrift ][:jmx_dash_port] = 10104
-default[:hbase][:stargate ][:jmx_dash_port] = 10105
-
-default[:hbase][:zookeeper ][:peer_port] = 2888
-default[:hbase][:zookeeper ][:leader_port] = 3888
-default[:hbase][:zookeeper ][:client_port] = 2181
-
-# default[:hbase][:regionserver][:dash_addr] = 0.0.0.0 # hbase.regionserver.info.bindAddress (default 0.0.0.0) -- The address for the HBase RegionServer web UI
-# default[:hbase][:zookeeper ][:addr] = default # hbase.zookeeper.dns.interface (default default) -- The name of the Network Interface from which a ZooKeeper server should report its IP address.
-# default[:hbase][:regionserver][:addr] = default # hbase.regionserver.dns.interface (default default) -- The name of the Network Interface from which a region server should report its IP address.
-# default[:hbase][:master ][:addr] = default # hbase.master.dns.interface (default default) -- The name of the Network Interface from which a master should report its IP address.
-# default[:hbase][:nameserver] = default # hbase.*.dns.nameserver (default default) -- The host name or IP address of the name server (DNS) which an hbase component should use to determine the host name used by the master for communication and display purposes.
-
-# these are set by the recipes
-node[:hbase][:exported_jars] ||= []
-node[:hbase][:exported_confs] ||= []
-
-#
-# Users
-#
-
-# hbase user
-node[:hbase][:user] = 'hbase'
-default[:users ]['hbase' ][:uid] = 304
-default[:groups]['hbase' ][:gid] = 304
-
-#
-# Run state of daemons
-#
-
-node[:hbase][:services] = [ :master, :regionserver, :stargate, :thrift ]
-
-default[:hbase][:master ][:run_state] = :start
-default[:hbase][:regionserver][:run_state] = :start
-default[:hbase][:thrift ][:run_state] = :start
-default[:hbase][:stargate ][:run_state] = :start
-
-#
-# HBase Backup
-#
-
-default[:hbase][:backup_location] = '/mnt/hbase/bkup'
-default[:hbase][:weekly_backup_tables] = []
-
-#
-# Stargate
-#
-
-# hbase.rest.readonly (default false) -- Defines the mode the REST server will be started
-# in. Possible values are: false: All HTTP methods are permitted - GET/PUT/POST/DELETE.
-# true: Only the GET method is permitted.
-#
-default[:hbase][:stargate][:readonly] = false ## false
diff --git a/lc-gdn-chef/cookbooks/hbase_cluster/attributes/tunables.rb b/lc-gdn-chef/cookbooks/hbase_cluster/attributes/tunables.rb
deleted file mode 100644
index 897e147d18f2ae789e19a92bb94a476c0bb53d58..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/hbase_cluster/attributes/tunables.rb
+++ /dev/null
@@ -1,298 +0,0 @@
-
-#
-# Tunables
-#
-
-# set hbase user's ulimits for heavy usage
-default[:tuning][:ulimit]['hbase'] = { :nofile => { :both => 32768 }, :nproc => { :both => 50000 } }
-
-# total size of the JVM heap (master)
-default[:hbase][:master ][:java_heap_size_max] = "1000m"
-# total size of the JVM heap (regionserver)
-default[:hbase][:regionserver][:java_heap_size_max] = "2000m"
-# size of the JVM "New Generation/Eden" heap segment (master)
-default[:hbase][:master ][:java_heap_size_new] = "256m"
-# size of the JVM "New Generation/Eden" heap segment (regionserver)
-default[:hbase][:regionserver][:java_heap_size_new] = "256m"
-
-# -XX:+UseParNewGC
-
-# JVM garbage collection tuning for the hbase master
-default[:hbase][:master ][:gc_tuning_opts] = "-XX:+UseConcMarkSweepGC -XX:+CMSIncrementalMode -XX:+AggressiveOpts"
-# JVM garbage collection tuning for the hbase regionserver
-default[:hbase][:regionserver][:gc_tuning_opts] = "-XX:+UseConcMarkSweepGC -XX:+CMSIncrementalMode -XX:+AggressiveOpts -XX:CMSInitiatingOccupancyFraction=88"
-
-# What details to log about JVM garbage collection statistics for the hbase master
-default[:hbase][:master ][:gc_log_opts] = "-verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps"
-# What details to log about JVM garbage collection statistics for the hbase regionserver
-default[:hbase][:regionserver][:gc_log_opts] = "-verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps"
-
-#
-# !!!NOTE!!!!
-#
-# The slightly-humanized names below are under review and subject to
-# modification until cluster_chef v3 is released. We may change them
-# until then but won't change them after
-#
-#
-
-# hbase.client.write.buffer (default 2097152) Default size of the HTable client write
-# buffer in bytes. A bigger buffer takes more memory -- on both the client and server
-# side since server instantiates the passed write buffer to process it -- but a larger
-# buffer size reduces the number of RPCs made. For an estimate of server-side
-# memory-used, evaluate
-#
-# hbase.client.write.buffer * hbase.regionserver.handler.count
-#
-default[:hbase][:client][:write_buffer] = 2097152 ## 2097152
-
-# hbase.client.pause (default 1000) -- General client pause value. Used mostly as value
-# to wait before running a retry of a failed get, region lookup, etc.
-#
-default[:hbase][:client][:pause_period_ms] = 1000 ## 1000
-
-# hbase.client.retries.number (default 10) -- Maximum retries. Used as maximum for all
-# retryable operations such as fetching of the root region from root region server,
-# getting a cell's value, starting a row update, etc.
-#
-default[:hbase][:client][:retry_count] = 10 ## 10
-
-# hbase.client.scanner.caching (default 1) -- Number of rows that will be fetched when
-# calling next on a scanner if it is not served from (local, client) memory. Higher
-# caching values will enable faster scanners but will eat up more memory and some calls
-# of next may take longer and longer times when the cache is empty. Do not set this
-# value such that the time between invocations is greater than the scanner timeout;
-# i.e. hbase.regionserver.lease.period
-#
-default[:hbase][:client][:scanner_prefetch_rows] = 1 ## 1
-
-# hbase.client.keyvalue.maxsize (default 10485760) -- Specifies the combined maximum
-# allowed size of a KeyValue instance. This is to set an upper boundary for a single
-# entry saved in a storage file. Since KeyValues cannot be split, this helps avoid a
-# region becoming unsplittable because a single entry is too large. It seems wise to set
-# this to a fraction of the maximum region size. Setting it to zero or less disables the
-# check.
-#
-default[:hbase][:client][:max_keyvalue_size] = 10485760 ## 10485760
-
-# hbase.regionserver.lease.period (default 60000) -- HRegion server lease period in
-# milliseconds. Default is 60 seconds. Clients must report in within this period else
-# they are considered dead.
-#
-default[:hbase][:regionserver][:lease_period] = 60000 ## 60000
-
-# hbase.regionserver.handler.count (default 10) -- Count of RPC Server instances spun up
-# on RegionServers. Same property is used by the Master for count of master handlers.
-#
-default[:hbase][:regionserver][:handler_count] = 10 ## 10
-
-# hbase.regionserver.regionSplitLimit (default 2147483647) -- Limit for the number of
-# regions after which no more region splitting should take place. This is not a hard
-# limit for the number of regions but acts as a guideline for the regionserver to stop
-# splitting after a certain limit. Default is set to MAX_INT; i.e. do not block
-# splitting.
-#
-default[:hbase][:regionserver][:split_limit] = 2_147_483_647 ## 2_147_483_647
-
-# hbase.regionserver.msginterval (default 3000) -- Interval between messages from the
-# RegionServer to Master in milliseconds.
-#
-default[:hbase][:regionserver][:msg_period] = 3_000 ## 3000
-
-# hbase.regionserver.optionallogflushinterval (default 1000) -- Sync the HLog to the HDFS
-# after this interval if it has not accumulated enough entries to trigger a
-# sync. Default 1 second. Units: milliseconds.
-#
-default[:hbase][:regionserver][:log_flush_period] = 1_000 ## 1_000
-
-# hbase.regionserver.logroll.period (default 3600000) -- Period at which we will roll the
-# commit log regardless of how many edits it has.
-#
-default[:hbase][:regionserver][:logroll_period] = 3_600_000 ## 3_600_000
-
-# hbase.regionserver.thread.splitcompactcheckfrequency (default 20000) -- How often a
-# region server runs the split/compaction check.
-#
-default[:hbase][:regionserver][:split_check_period] = 20_000 ## 20_000
-
-# hbase.server.thread.wakefrequency (default 10_000) -- Time to sleep in between searches
-# for work (in milliseconds). Used as sleep interval by service threads such as log roller.
-#
-default[:hbase][:regionserver][:worker_period] = 10_000 ## 10_000
-
-# hbase.balancer.period (default 300000) -- Period at which the region balancer runs in
-# the Master.
-#
-default[:hbase][:regionserver][:balancer_period] = 300_000 ## 300_000
-
-# hbase.regions.slop (default 0) -- Rebalance if any regionserver has more than
-#
-# average + (average * slop) regions
-#
-default[:hbase][:regionserver][:balancer_slop] = 0 ## 0
-
-# hbase.regionserver.global.memstore.upperLimit (default 0.4) -- Maximum size of all
-# memstores in a region server before new updates are blocked and flushes are
-# forced. Defaults to 40% of heap.
-#
-default[:hbase][:memstore][:flush_upper_heap_pct] = 0.4 ## 0.4
-
-# hbase.regionserver.global.memstore.lowerLimit (default 0.35) -- When memstores are being
-# forced to flush to make room in memory, keep flushing until we hit this mark. Defaults
-# to 35% of heap. Setting this value equal to hbase.regionserver.global.memstore.upperLimit
-# causes the minimum possible amount of flushing when updates are blocked due to memstore
-# limiting.
-#
-default[:hbase][:memstore][:flush_lower_heap_pct] = 0.35 ## 0.35
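-# Illustrative arithmetic, assuming the cookbook's default 2000m regionserver heap:
-#   0.40 * 2000   # => ~800 MB global memstore ceiling (new updates block above this)
-#   0.35 * 2000   # => ~700 MB level that forced flushes drain back down to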
-
-# hbase.hregion.memstore.flush.size (default 67108864) -- Memstore will be flushed to disk
-# if size of the memstore exceeds this number of bytes. Value is checked by a thread
-# that runs every hbase.server.thread.wakefrequency.
-#
-default[:hbase][:memstore][:flush_size_trigger] = 67_108_864 ## 67108864
-
-# hbase.hregion.preclose.flush.size (default 5 mb) -- If the memstores in a region are
-# this size or larger when we go to close, run a "pre-flush" to clear out memstores
-# before we put up the region closed flag and take the region offline. On close, a
-# flush is run under the close flag to empty memory. During this time the region is
-# offline and we are not taking on any writes. If the memstore content is large, this
-# flush could take a long time to complete. The preflush is meant to clean out the bulk
-# of the memstore before putting up the close flag and taking the region offline so the
-# flush that runs under the close flag has little to do.
-#
-default[:hbase][:memstore][:preflush_trigger] = (5 * 1024 * 1024) # (5 * 1024 * 1024)
-
-# hbase.hregion.memstore.block.multiplier (default 2) -- Block updates if a memstore
-# reaches hbase.hregion.memstore.block.multiplier times hbase.hregion.memstore.flush.size
-# bytes. Useful for preventing a runaway memstore during spikes in update traffic.
-# Without an upper bound, the memstore fills such that when it flushes, the resulting
-# flush files take a long time to compact or split, or worse, we OOME.
-#
-default[:hbase][:memstore][:flush_stall_trigger] = 8 ## 2
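-# Illustrative arithmetic with the values in this file:
-#   8 * 67_108_864   # => 536_870_912 bytes, i.e. a single memstore may grow to
-#   ~512 MB before updates stall (versus ~128 MB with the upstream default of 2).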
-
-# hbase.hregion.memstore.mslab.enabled (default false) -- Experimental: Enables the
-# MemStore-Local Allocation Buffer, a feature which works to prevent heap fragmentation
-# under heavy write loads. This can reduce the frequency of stop-the-world GC pauses on
-# large heaps.
-#
-default[:hbase][:memstore][:mslab_enabled] = false ## false
-
-# hbase.hregion.max.filesize (default 268435456) -- Maximum HStoreFile size. If any one of
-# a column families' HStoreFiles has grown to exceed this value, the hosting HRegion is
-# split in two.
-#
-default[:hbase][:regionserver][:max_filesize] = (256 * 1024 * 1024) ## 256 MB
-
-# hbase.hstore.compactionThreshold (default 3) -- If more than this number of HStoreFiles
-# in any one HStore (one HStoreFile is written per flush of memstore) then a compaction
-# is run to rewrite all HStoreFiles as one. Larger numbers put off compaction but
-# when it runs, it takes longer to complete.
-#
-default[:hbase][:compaction][:files_trigger] = 3 ## 3
-
-# hbase.hstore.blockingStoreFiles (default 7) -- If more than this number of StoreFiles in
-# any one Store (one StoreFile is written per flush of MemStore) then updates are
-# blocked for this HRegion until a compaction is completed, or until
-# hbase.hstore.blockingWaitTime has been exceeded.
-#
-default[:hbase][:compaction][:pause_trigger] = 7 ## 7
-
-# hbase.hstore.blockingWaitTime (default 90_000) -- The time an HRegion will block updates
-# for after hitting the StoreFile limit defined by hbase.hstore.blockingStoreFiles.
-# After this time has elapsed, the HRegion will stop blocking updates even if a
-# compaction has not been completed. Default: 90 seconds.
-#
-default[:hbase][:compaction][:pause_time] = 90_000 ## 90000
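-# Taken together (an illustrative reading of the three settings above): a store is
-# compacted once it holds more than 3 files (roughly every 3 memstore flushes), and
-# if it ever accumulates more than 7 files, writes to that region stall until the
-# compaction finishes or the 90 s pause_time elapses.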
-
-# hbase.hstore.compaction.max (default 10) -- Max number of HStoreFiles to compact per
-# 'minor' compaction.
-#
-default[:hbase][:compaction][:max_combine_files] = 10 ## 10
-
-# hbase.hregion.majorcompaction (default 86400000) -- The time (in milliseconds) between
-# 'major' compactions of all HStoreFiles in a region. Default: 1 day. Set to 0 to
-# disable automated major compactions.
-#
-default[:hbase][:compaction][:period] = 86_400_000 ## 86_400_000
-
-# hbase.mapreduce.hfileoutputformat.blocksize (default 65536) -- The mapreduce
-# HFileOutputFormat writes storefiles/hfiles. This is the minimum hfile blocksize to
-# emit. Usually in hbase, writing hfiles, the blocksize is gotten from the table schema
-# (HColumnDescriptor) but in the mapreduce outputformat context, we don't have access to
-# the schema, so the blocksize is taken from the Configuration. The smaller you make the blocksize,
-# the bigger your index and the less you fetch on a random-access. Set the blocksize
-# down if you have small cells and want faster random-access of individual cells.
-#
-default[:hbase][:regionserver][:hfile_block_size] = 65536 ## 65536
-
-# hbase.regionserver.codecs (default "") -- To have a RegionServer test a set of
-# codecs and fail-to-start if any codec is missing or misinstalled, add the
-# configuration hbase.regionserver.codecs to your hbase-site.xml with a value of
-# codecs to test on startup. For example if the hbase.regionserver.codecs value
-# is "lzo,gz" and if lzo is not present or improperly installed, the misconfigured
-# RegionServer will fail to start.
-#
-default[:hbase][:regionserver][:required_codecs] = ""
-
-# hfile.block.cache.size (default 0.2) -- Percentage of maximum heap (-Xmx setting) to
-# allocate to block cache used by HFile/StoreFile. Default of 0.2 means allocate 20%.
-# Set to 0 to disable.
-#
-default[:hbase][:regionserver][:block_cache_size] = 0.2 ## 0.2
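-# Illustrative arithmetic, assuming the cookbook's default 2000m regionserver heap:
-#   0.2 * 2000   # => ~400 MB of heap reserved for the HFile/StoreFile block cache.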
-
-# hbase.hash.type (default murmur) -- The hashing algorithm for use in HashFunction. Two
-# values are supported now: murmur (MurmurHash) and jenkins (JenkinsHash). Used by
-# bloom filters.
-#
-default[:hbase][:regionserver][:hash_type] = "murmur" ## "murmur"
-
-#
-# Built-in zookeeper
-#
-
-# zookeeper.session.timeout (default 180_000) -- ZooKeeper session timeout. HBase passes
-# this to the zk quorum as suggested maximum time for a session. See
-# http://hadoop.apache.org/zookeeper/docs/current/zookeeperProgrammers.html#ch_zkSessions
-# "The client sends a requested timeout, the server responds with the timeout that it
-# can give the client." In milliseconds.
-#
-default[:hbase][:zookeeper][:session_timeout] = 180_000 ## 180_000
-
-# zookeeper.znode.parent (default "/hbase") -- Root ZNode for HBase in
-# ZooKeeper. All of HBase's ZooKeeper files that are configured with a
-# relative path will go under this node. By default, all of HBase's ZooKeeper
-# file paths are configured with a relative path, so they will all go under
-# this directory unless changed.
-#
-default[:hbase][:zookeeper][:znode_parent] = '/hbase' ## "/hbase"
-
-# zookeeper.znode.rootserver (default root-region-server) -- Path to ZNode
-# holding root region location. This is written by the master and read by
-# clients and region servers. If a relative path is given, the parent folder
-# will be ${zookeeper.znode.parent}. By default, this means the root location
-# is stored at /hbase/root-region-server.
-#
-default[:hbase][:zookeeper][:znode_rootserver] = "root-region-server" ## "root-region-server"
-
-# hbase.zookeeper.property.maxClientCnxns (default 2000) -- Limit on number of concurrent
-# connections (at the socket level) that a single client, identified by IP address, may
-# make to a single member of the ZooKeeper ensemble. Set high to avoid zk connection
-# issues running standalone and pseudo-distributed.
-#
-default[:hbase][:zookeeper][:max_client_connections] = 2000 ## 2000
-
-
-# # "The zookeeper.session.timeout must be a minimum of 2 times the tickTime and a maximum of 20 times the tickTime." For large clusters or high-latency environments (e.g. EC2), set it to 6 or even 9 from its current setting of '3'.
-# default[:hbase][:regionserver][:zookeeper_tick_time] = 6 # 3
-
-# default[:hbase][:dfs][:replication] = 3
-
-# hbase.master.lease.period | 120000
-# hbase.master.meta.thread.rescanfrequency | 60000
-# hbase.hbasemaster.maxregionopen | 120000
-# hbase.regions.percheckin | 10
-# hfile.min.blocksize.size | 65536
-# zookeeper.retries | 5
-# zookeeper.pause | 2000
-# zookeeper.znode.safemode | safe-mode
-# hbase.zookeeper.property.tickTime | 3000
diff --git a/lc-gdn-chef/cookbooks/hbase_cluster/metadata.json b/lc-gdn-chef/cookbooks/hbase_cluster/metadata.json
deleted file mode 100644
index 30211643de6a75ca799612cd0442f2236d2f03fd..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/hbase_cluster/metadata.json
+++ /dev/null
@@ -1,1049 +0,0 @@
-{
- "name": "hbase_cluster",
- "description": "HBase: a massively-scalable high-throughput datastore based on the Hadoop HDFS",
- "long_description": "# hbase chef cookbook\n\nHBase: a massively-scalable high-throughput datastore based on the Hadoop HDFS\n\n## Overview\n\nInstalls/Configures HBase\n\n## Recipes \n\n* `backup_tables` - Cron job to backup tables to S3\n* `config` - Finalizes the config, writes out the config files\n* `dashboard` - Simple dashboard for HBase config and state\n* `default` - Base configuration for hbase\n* `master` - HBase Master\n* `regionserver` - HBase Regionserver\n* `stargate` - HBase Stargate: HTTP frontend to HBase\n* `thrift` - HBase Thrift Listener\n\n## Integration\n\nSupports platforms: debian and ubuntu\n\nCookbook dependencies:\n* java\n* apt\n* runit\n* volumes\n* metachef\n* dashpot\n* hadoop_cluster\n* zookeeper\n* ganglia\n\n\n## Attributes\n\n* `[:groups][:hbase][:gid]` - (default: \"304\")\n* `[:hbase][:tmp_dir]` - (default: \"/mnt/hbase/tmp\")\n* `[:hbase][:home_dir]` - (default: \"/usr/lib/hbase\")\n* `[:hbase][:conf_dir]` - (default: \"/etc/hbase/conf\")\n* `[:hbase][:log_dir]` - (default: \"/var/log/hbase\")\n* `[:hbase][:pid_dir]` - (default: \"/var/run/hbase\")\n* `[:hbase][:weekly_backup_tables]` - \n* `[:hbase][:backup_location]` - (default: \"/mnt/hbase/bkup\")\n* `[:hbase][:master][:java_heap_size_max]` - (default: \"1000m\")\n - total size of the JVM heap (master)\n* `[:hbase][:master][:java_heap_size_new]` - (default: \"256m\")\n - size of the JVM \"New Generation/Eden\" heap segment (master)\n* `[:hbase][:master][:gc_tuning_opts]` - (default: \"-XX:+UseConcMarkSweepGC -XX:+CMSIncrementalMode -XX:+AggressiveOpts\")\n - JVM garbage collection tuning for the hbase master\n* `[:hbase][:master][:gc_log_opts]` - (default: \"-verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps\")\n - What details to log about JVM garbage collection statistics for the hbase master\n* `[:hbase][:master][:run_state]` - (default: \"start\")\n* `[:hbase][:master][:port]` - (default: \"60000\")\n* `[:hbase][:master][:dash_port]` - (default: \"60010\")\n* `[:hbase][:master][:jmx_dash_port]` - (default: \"10101\")\n* `[:hbase][:regionserver][:java_heap_size_max]` - (default: \"2000m\")\n - total size of the JVM heap (regionserver)\n* `[:hbase][:regionserver][:java_heap_size_new]` - (default: \"256m\")\n - size of the JVM \"New Generation/Eden\" heap segment (regionserver)\n* `[:hbase][:regionserver][:gc_tuning_opts]` - (default: \"-XX:+UseConcMarkSweepGC -XX:+CMSIncrementalMode -XX:+AggressiveOpts -XX:CMSInitiatingOccupancyFraction=88\")\n - JVM garbage collection tuning for the hbase regionserver\n* `[:hbase][:regionserver][:gc_log_opts]` - (default: \"-verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps\")\n - What details to log about JVM garbage collection statistics for the hbase regionserver\n* `[:hbase][:regionserver][:run_state]` - (default: \"start\")\n* `[:hbase][:regionserver][:port]` - (default: \"60020\")\n* `[:hbase][:regionserver][:dash_port]` - (default: \"60030\")\n* `[:hbase][:regionserver][:jmx_dash_port]` - (default: \"10102\")\n* `[:hbase][:regionserver][:lease_period]` - (default: \"60000\")\n - hbase.regionserver.lease.period (default 60000) -- HRegion server lease period in\n milliseconds. Default is 60 seconds. Clients must report in within this period else\n they are considered dead.\n* `[:hbase][:regionserver][:handler_count]` - (default: \"10\")\n - hbase.regionserver.handler.count (default 10) -- Count of RPC Server instances spun up\n on RegionServers. 
Same property is used by the Master for count of master handlers.\n* `[:hbase][:regionserver][:split_limit]` - (default: \"2147483647\")\n - hbase.regionserver.regionSplitLimit (default 2147483647) -- Limit for the number of\n regions after which no more region splitting should take place. This is not a hard\n limit for the number of regions but acts as a guideline for the regionserver to stop\n splitting after a certain limit. Default is set to MAX_INT; i.e. do not block\n splitting.\n* `[:hbase][:regionserver][:msg_period]` - (default: \"3000\")\n - hbase.regionserver.msginterval (default 3000) -- Interval between messages from the\n RegionServer to Master in milliseconds.\n* `[:hbase][:regionserver][:log_flush_period]` - (default: \"1000\")\n - hbase.regionserver.optionallogflushinterval (default 1000) -- Sync the HLog to the HDFS\n after this interval if it has not accumulated enough entries to trigger a\n sync. Default 1 second. Units: milliseconds.\n* `[:hbase][:regionserver][:logroll_period]` - (default: \"3600000\")\n - hbase.regionserver.logroll.period (default 3600000) -- Period at which we will roll the\n commit log regardless of how many edits it has.\n* `[:hbase][:regionserver][:split_check_period]` - (default: \"20000\")\n - hbase.regionserver.thread.splitcompactcheckfrequency (default 20000) -- How often a\n region server runs the split/compaction check.\n* `[:hbase][:regionserver][:worker_period]` - (default: \"10000\")\n - hbase.server.thread.wakefrequency (default 10_000) -- Time to sleep in between searches\n for work (in milliseconds). Used as sleep interval by service threads such as log roller.\n* `[:hbase][:regionserver][:balancer_period]` - (default: \"300000\")\n - hbase.balancer.period (default 300000) -- Period at which the region balancer runs in\n the Master.\n* `[:hbase][:regionserver][:balancer_slop]` - (default: \"0\")\n - hbase.regions.slop (default 0) -- Rebalance if any regionserver has more than\n average + (average * slop) regions\n* `[:hbase][:regionserver][:max_filesize]` - (default: \"268435456\")\n - hbase.hregion.max.filesize (default 268435456) -- Maximum HStoreFile size. If any one of\n a column families' HStoreFiles has grown to exceed this value, the hosting HRegion is\n split in two.\n* `[:hbase][:regionserver][:hfile_block_size]` - (default: \"65536\")\n - hbase.mapreduce.hfileoutputformat.blocksize (default 65536) -- The mapreduce\n HFileOutputFormat writes storefiles/hfiles. This is the minimum hfile blocksize to\n emit. Usually in hbase, writing hfiles, the blocksize is gotten from the table schema\n (HColumnDescriptor) but in the mapreduce outputformat context, we don't have access to\n the schema so get blocksize from Configuation. The smaller you make the blocksize,\n the bigger your index and the less you fetch on a random-access. Set the blocksize\n down if you have small cells and want faster random-access of individual cells.\n* `[:hbase][:regionserver][:required_codecs]` - \n - hbase.regionserver.codecs (default \"\") -- To have a RegionServer test a set of\n codecs and fail-to-start if any code is missing or misinstalled, add the\n configuration hbase.regionserver.codecs to your hbase-site.xml with a value of\n codecs to test on startup. 
For example if the hbase.regionserver.codecs value\n is \"lzo,gz\" and if lzo is not present or improperly installed, the misconfigured\n RegionServer will fail to start.\n* `[:hbase][:regionserver][:block_cache_size]` - (default: \"0.2\")\n - hfile.block.cache.size (default 0.2) -- Percentage of maximum heap (-Xmx setting) to\n allocate to block cache used by HFile/StoreFile. Default of 0.2 means allocate 20%.\n Set to 0 to disable.\n* `[:hbase][:regionserver][:hash_type]` - (default: \"murmur\")\n - hbase.hash.type (default murmur) -- The hashing algorithm for use in HashFunction. Two\n values are supported now: murmur (MurmurHash) and jenkins (JenkinsHash). Used by\n bloom filters.\n* `[:hbase][:stargate][:run_state]` - (default: \"start\")\n* `[:hbase][:stargate][:port]` - (default: \"8080\")\n* `[:hbase][:stargate][:jmx_dash_port]` - (default: \"10105\")\n* `[:hbase][:stargate][:readonly]` - \n - hbase.rest.readonly (default false) -- Defines the mode the REST server will be started\n in. Possible values are: false: All HTTP methods are permitted - GET/PUT/POST/DELETE.\n true: Only the GET method is permitted.\n* `[:hbase][:thrift][:run_state]` - (default: \"start\")\n* `[:hbase][:thrift][:jmx_dash_port]` - (default: \"10104\")\n* `[:hbase][:zookeeper][:jmx_dash_port]` - (default: \"10103\")\n* `[:hbase][:zookeeper][:peer_port]` - (default: \"2888\")\n* `[:hbase][:zookeeper][:leader_port]` - (default: \"3888\")\n* `[:hbase][:zookeeper][:client_port]` - (default: \"2181\")\n* `[:hbase][:zookeeper][:session_timeout]` - (default: \"180000\")\n - zookeeper.session.timeout (default 180_000) -- ZooKeeper session timeout. HBase passes\n this to the zk quorum as suggested maximum time for a session. See\n http://hadoop.apache.org/zookeeper/docs/current/zookeeperProgrammers.html#ch_zkSessions\n \"The client sends a requested timeout, the server responds with the timeout that it\n can give the client. \" In milliseconds.\n* `[:hbase][:zookeeper][:znode_parent]` - (default: \"/hbase\")\n - zookeeper.znode.parent (default \"/hbase\") -- Root ZNode for HBase in\n ZooKeeper. All of HBase's ZooKeeper files that are configured with a\n relative path will go under this node. By default, all of HBase's ZooKeeper\n file path are configured with a relative path, so they will all go under\n this directory unless changed.\n* `[:hbase][:zookeeper][:znode_rootserver]` - (default: \"root-region-server\")\n - zookeeper.znode.rootserver (default root-region-server) -- Path to ZNode\n holding root region location. This is written by the master and read by\n clients and region servers. If a relative path is given, the parent folder\n will be ${zookeeper.znode.parent}. By default, this means the root location\n is stored at /hbase/root-region-server.\n* `[:hbase][:zookeeper][:max_client_connections]` - (default: \"2000\")\n - hbase.zookeeper.property.maxClientCnxns (default 2000) -- Limit on number of concurrent\n connections (at the socket level) that a single client, identified by IP address, may\n make to a single member of the ZooKeeper ensemble. Set high to avoid zk connection\n issues running standalone and pseudo-distributed.\n* `[:hbase][:client][:write_buffer]` - (default: \"2097152\")\n - hbase.client.write.buffer (default 2097152) Default size of the HTable client write\n buffer in bytes. A bigger buffer takes more memory -- on both the client and server\n side since server instantiates the passed write buffer to process it -- but a larger\n buffer size reduces the number of RPCs made. 
For an estimate of server-side\n memory-used, evaluate\n hbase.client.write.buffer * hbase.regionserver.handler.count\n* `[:hbase][:client][:pause_period_ms]` - (default: \"1000\")\n - hbase.client.pause (default 1000) -- General client pause value. Used mostly as value\n to wait before running a retry of a failed get, region lookup, etc.\n* `[:hbase][:client][:retry_count]` - (default: \"10\")\n - hbase.client.retries.number (default 10) -- Maximum retries. Used as maximum for all\n retryable operations such as fetching of the root region from root region server,\n getting a cell's value, starting a row update, etc.\n* `[:hbase][:client][:scanner_prefetch_rows]` - (default: \"1\")\n - hbase.client.scanner.caching (default 1) -- Number of rows that will be fetched when\n calling next on a scanner if it is not served from (local, client) memory. Higher\n caching values will enable faster scanners but will eat up more memory and some calls\n of next may take longer and longer times when the cache is empty. Do not set this\n value such that the time between invocations is greater than the scanner timeout;\n i.e. hbase.regionserver.lease.period\n* `[:hbase][:client][:max_keyvalue_size]` - (default: \"10485760\")\n - hbase.client.keyvalue.maxsize (default 10485760) -- Specifies the combined maximum\n allowed size of a KeyValue instance. This is to set an upper boundary for a single\n entry saved in a storage file. Since they cannot be split it helps avoiding that a\n region cannot be split any further because the data is too large. It seems wise to set\n this to a fraction of the maximum region size. Setting it to zero or less disables the\n check.\n* `[:hbase][:memstore][:flush_upper_heap_pct]` - (default: \"0.4\")\n - hbase.regionserver.global.memstore.upperLimit (default 0.4) -- Maximum size of all\n memstores in a region server before new updates are blocked and flushes are\n forced. Defaults to 40% of heap\n* `[:hbase][:memstore][:flush_lower_heap_pct]` - (default: \"0.35\")\n - hbase.regionserver.global.memstore.lowerLimit (default 0.35) -- When memstores are being\n forced to flush to make room in memory, keep flushing until we hit this mark. Defaults\n to 35% of heap. This value equal to hbase.regionserver.global.memstore.upperLimit\n causes the minimum possible flushing to occur when updates are blocked due to memstore\n limiting.\n* `[:hbase][:memstore][:flush_size_trigger]` - (default: \"67108864\")\n - hbase.hregion.memstore.flush.size (default 67108864) -- Memstore will be flushed to disk\n if size of the memstore exceeds this number of bytes. Value is checked by a thread\n that runs every hbase.server.thread.wakefrequency.\n* `[:hbase][:memstore][:preflush_trigger]` - (default: \"5242880\")\n - hbase.hregion.preclose.flush.size (default 5 mb) -- If the memstores in a region are\n this size or larger when we go to close, run a \"pre-flush\" to clear out memstores\n before we put up the region closed flag and take the region offline. On close, a\n flush is run under the close flag to empty memory. During this time the region is\n offline and we are not taking on any writes. If the memstore content is large, this\n flush could take a long time to complete. 
The preflush is meant to clean out the bulk\n of the memstore before putting up the close flag and taking the region offline so the\n flush that runs under the close flag has little to do.\n* `[:hbase][:memstore][:flush_stall_trigger]` - (default: \"8\")\n - hbase.hregion.memstore.block.multiplier (default 2) -- Block updates if memstore has\n hbase.hregion.block.memstore time hbase.hregion.flush.size bytes. Useful preventing\n runaway memstore during spikes in update traffic. Without an upper-bound, memstore\n fills such that when it flushes the resultant flush files take a long time to compact\n or split, or worse, we OOME.\n* `[:hbase][:memstore][:mslab_enabled]` - \n - hbase.hregion.memstore.mslab.enabled (default false) -- Experimental: Enables the\n MemStore-Local Allocation Buffer, a feature which works to prevent heap fragmentation\n under heavy write loads. This can reduce the frequency of stop-the-world GC pauses on\n large heaps.\n* `[:hbase][:compaction][:files_trigger]` - (default: \"3\")\n - hbase.hstore.compactionThreshold (default 3) -- If more than this number of HStoreFiles\n in any one HStore (one HStoreFile is written per flush of memstore) then a compaction\n is run to rewrite all HStoreFiles files as one. Larger numbers put off compaction but\n when it runs, it takes longer to complete.\n* `[:hbase][:compaction][:pause_trigger]` - (default: \"7\")\n - hbase.hstore.blockingStoreFiles (default 7) -- If more than this number of StoreFiles in\n any one Store (one StoreFile is written per flush of MemStore) then updates are\n blocked for this HRegion until a compaction is completed, or until\n hbase.hstore.blockingWaitTime has been exceeded.\n* `[:hbase][:compaction][:pause_time]` - (default: \"90000\")\n - hbase.hstore.blockingWaitTime (default 90_000) -- The time an HRegion will block updates\n for after hitting the StoreFile limit defined by hbase.hstore.blockingStoreFiles.\n After this time has elapsed, the HRegion will stop blocking updates even if a\n compaction has not been completed. Default: 90 seconds.\n* `[:hbase][:compaction][:max_combine_files]` - (default: \"10\")\n - hbase.hstore.compaction.max (default 10) -- Max number of HStoreFiles to compact per\n 'minor' compaction.\n* `[:hbase][:compaction][:period]` - (default: \"86400000\")\n - hbase.hregion.majorcompaction (default 86400000) -- The time (in miliseconds) between\n 'major' compactions of all HStoreFiles in a region. Default: 1 day. Set to 0 to\n disable automated major compactions.\n* `[:users][:hbase][:uid]` - (default: \"304\")\n* `[:tuning][:ulimit][:hbase]` - \n\n## License and Author\n\nAuthor:: Chris Howe - Infochimps, Inc ()\nCopyright:: 2011, Chris Howe - Infochimps, Inc\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\nyou may not use this file except in compliance with the License.\nYou may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\nUnless required by applicable law or agreed to in writing, software\ndistributed under the License is distributed on an \"AS IS\" BASIS,\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\nSee the License for the specific language governing permissions and\nlimitations under the License.\n\n> readme generated by [cluster_chef](http://github.com/infochimps/cluster_chef)'s cookbook_munger\n",
- "maintainer": "Chris Howe - Infochimps, Inc",
- "maintainer_email": "coders@infochimps.com",
- "license": "Apache 2.0",
- "platforms": {
- "debian": ">= 0.0.0",
- "ubuntu": ">= 0.0.0"
- },
- "dependencies": {
- "java": ">= 0.0.0",
- "apt": ">= 0.0.0",
- "runit": ">= 0.0.0",
- "volumes": ">= 0.0.0",
- "metachef": ">= 0.0.0",
- "dashpot": ">= 0.0.0",
- "hadoop_cluster": ">= 0.0.0",
- "zookeeper": ">= 0.0.0",
- "ganglia": ">= 0.0.0"
- },
- "recommendations": {
- },
- "suggestions": {
- },
- "conflicting": {
- },
- "providing": {
- },
- "replacing": {
- },
- "attributes": {
- "groups/hbase/gid": {
- "display_name": "",
- "description": "",
- "default": "304",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/tmp_dir": {
- "display_name": "",
- "description": "",
- "default": "/mnt/hbase/tmp",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/home_dir": {
- "display_name": "",
- "description": "",
- "default": "/usr/lib/hbase",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/conf_dir": {
- "display_name": "",
- "description": "",
- "default": "/etc/hbase/conf",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/log_dir": {
- "display_name": "",
- "description": "",
- "default": "/var/log/hbase",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/pid_dir": {
- "display_name": "",
- "description": "",
- "default": "/var/run/hbase",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/weekly_backup_tables": {
- "display_name": "",
- "description": "",
- "default": "",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/backup_location": {
- "display_name": "",
- "description": "",
- "default": "/mnt/hbase/bkup",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/master/java_heap_size_max": {
- "display_name": "",
- "description": "total size of the JVM heap (master)",
- "default": "1000m",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/master/java_heap_size_new": {
- "display_name": "",
- "description": "size of the JVM \"New Generation/Eden\" heap segment (master)",
- "default": "256m",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/master/gc_tuning_opts": {
- "display_name": "",
- "description": "JVM garbage collection tuning for the hbase master",
- "default": "-XX:+UseConcMarkSweepGC -XX:+CMSIncrementalMode -XX:+AggressiveOpts",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/master/gc_log_opts": {
- "display_name": "",
- "description": "What details to log about JVM garbage collection statistics for the hbase master",
- "default": "-verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/master/run_state": {
- "display_name": "",
- "description": "",
- "type": "array",
- "default": "start",
- "choice": [
-
- ],
- "calculated": false,
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/master/port": {
- "display_name": "",
- "description": "",
- "default": "60000",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/master/dash_port": {
- "display_name": "",
- "description": "",
- "default": "60010",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/master/jmx_dash_port": {
- "display_name": "",
- "description": "",
- "default": "10101",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/regionserver/java_heap_size_max": {
- "display_name": "",
- "description": "total size of the JVM heap (regionserver)",
- "default": "2000m",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/regionserver/java_heap_size_new": {
- "display_name": "",
- "description": "size of the JVM \"New Generation/Eden\" heap segment (regionserver)",
- "default": "256m",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/regionserver/gc_tuning_opts": {
- "display_name": "",
- "description": "JVM garbage collection tuning for the hbase regionserver",
- "default": "-XX:+UseConcMarkSweepGC -XX:+CMSIncrementalMode -XX:+AggressiveOpts -XX:CMSInitiatingOccupancyFraction=88",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/regionserver/gc_log_opts": {
- "display_name": "",
- "description": "What details to log about JVM garbage collection statistics for the hbase regionserver",
- "default": "-verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/regionserver/run_state": {
- "display_name": "",
- "description": "",
- "type": "array",
- "default": "start",
- "choice": [
-
- ],
- "calculated": false,
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/regionserver/port": {
- "display_name": "",
- "description": "",
- "default": "60020",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/regionserver/dash_port": {
- "display_name": "",
- "description": "",
- "default": "60030",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/regionserver/jmx_dash_port": {
- "display_name": "",
- "description": "",
- "default": "10102",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/regionserver/lease_period": {
- "display_name": "",
- "description": "hbase.regionserver.lease.period (default 60000) -- HRegion server lease period in\nmilliseconds. Default is 60 seconds. Clients must report in within this period else\nthey are considered dead.",
- "default": "60000",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/regionserver/handler_count": {
- "display_name": "",
- "description": "hbase.regionserver.handler.count (default 10) -- Count of RPC Server instances spun up\non RegionServers. Same property is used by the Master for count of master handlers.",
- "default": "10",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/regionserver/split_limit": {
- "display_name": "",
- "description": "hbase.regionserver.regionSplitLimit (default 2147483647) -- Limit for the number of\nregions after which no more region splitting should take place. This is not a hard\nlimit for the number of regions but acts as a guideline for the regionserver to stop\nsplitting after a certain limit. Default is set to MAX_INT; i.e. do not block\nsplitting.",
- "default": "2147483647",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/regionserver/msg_period": {
- "display_name": "",
- "description": "hbase.regionserver.msginterval (default 3000) -- Interval between messages from the\nRegionServer to Master in milliseconds.",
- "default": "3000",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/regionserver/log_flush_period": {
- "display_name": "",
- "description": "hbase.regionserver.optionallogflushinterval (default 1000) -- Sync the HLog to the HDFS\nafter this interval if it has not accumulated enough entries to trigger a\nsync. Default 1 second. Units: milliseconds.",
- "default": "1000",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/regionserver/logroll_period": {
- "display_name": "",
- "description": "hbase.regionserver.logroll.period (default 3600000) -- Period at which we will roll the\ncommit log regardless of how many edits it has.",
- "default": "3600000",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/regionserver/split_check_period": {
- "display_name": "",
- "description": "hbase.regionserver.thread.splitcompactcheckfrequency (default 20000) -- How often a\nregion server runs the split/compaction check.",
- "default": "20000",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/regionserver/worker_period": {
- "display_name": "",
- "description": "hbase.server.thread.wakefrequency (default 10_000) -- Time to sleep in between searches\nfor work (in milliseconds). Used as sleep interval by service threads such as log roller.",
- "default": "10000",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/regionserver/balancer_period": {
- "display_name": "",
- "description": "hbase.balancer.period (default 300000) -- Period at which the region balancer runs in\nthe Master.",
- "default": "300000",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/regionserver/balancer_slop": {
- "display_name": "",
- "description": "hbase.regions.slop (default 0) -- Rebalance if any regionserver has more than\naverage + (average * slop) regions",
- "default": "0",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/regionserver/max_filesize": {
- "display_name": "",
- "description": "hbase.hregion.max.filesize (default 268435456) -- Maximum HStoreFile size. If any one of\na column families' HStoreFiles has grown to exceed this value, the hosting HRegion is\nsplit in two.",
- "default": "268435456",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/regionserver/hfile_block_size": {
- "display_name": "",
- "description": "hbase.mapreduce.hfileoutputformat.blocksize (default 65536) -- The mapreduce\nHFileOutputFormat writes storefiles/hfiles. This is the minimum hfile blocksize to\nemit. Usually in hbase, writing hfiles, the blocksize is gotten from the table schema\n(HColumnDescriptor) but in the mapreduce outputformat context, we don't have access to\nthe schema so get blocksize from Configuation. The smaller you make the blocksize,\nthe bigger your index and the less you fetch on a random-access. Set the blocksize\ndown if you have small cells and want faster random-access of individual cells.",
- "default": "65536",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/regionserver/required_codecs": {
- "display_name": "",
- "description": "hbase.regionserver.codecs (default \"\") -- To have a RegionServer test a set of\ncodecs and fail-to-start if any code is missing or misinstalled, add the\nconfiguration hbase.regionserver.codecs to your hbase-site.xml with a value of\ncodecs to test on startup. For example if the hbase.regionserver.codecs value\nis \"lzo,gz\" and if lzo is not present or improperly installed, the misconfigured\nRegionServer will fail to start.",
- "default": "",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/regionserver/block_cache_size": {
- "display_name": "",
- "description": "hfile.block.cache.size (default 0.2) -- Percentage of maximum heap (-Xmx setting) to\nallocate to block cache used by HFile/StoreFile. Default of 0.2 means allocate 20%.\nSet to 0 to disable.",
- "default": "0.2",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/regionserver/hash_type": {
- "display_name": "",
- "description": "hbase.hash.type (default murmur) -- The hashing algorithm for use in HashFunction. Two\nvalues are supported now: murmur (MurmurHash) and jenkins (JenkinsHash). Used by\nbloom filters.",
- "default": "murmur",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/stargate/run_state": {
- "display_name": "",
- "description": "",
- "type": "array",
- "default": "start",
- "choice": [
-
- ],
- "calculated": false,
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/stargate/port": {
- "display_name": "",
- "description": "",
- "default": "8080",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/stargate/jmx_dash_port": {
- "display_name": "",
- "description": "",
- "default": "10105",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/stargate/readonly": {
- "display_name": "",
- "description": "hbase.rest.readonly (default false) -- Defines the mode the REST server will be started\nin. Possible values are: false: All HTTP methods are permitted - GET/PUT/POST/DELETE.\ntrue: Only the GET method is permitted.",
- "default": "",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/thrift/run_state": {
- "display_name": "",
- "description": "",
- "default": "start",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/thrift/jmx_dash_port": {
- "display_name": "",
- "description": "",
- "default": "10104",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/zookeeper/jmx_dash_port": {
- "display_name": "",
- "description": "",
- "default": "10103",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/zookeeper/peer_port": {
- "display_name": "",
- "description": "",
- "default": "2888",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/zookeeper/leader_port": {
- "display_name": "",
- "description": "",
- "default": "3888",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/zookeeper/client_port": {
- "display_name": "",
- "description": "",
- "default": "2181",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/zookeeper/session_timeout": {
- "display_name": "",
- "description": "zookeeper.session.timeout (default 180_000) -- ZooKeeper session timeout. HBase passes\nthis to the zk quorum as suggested maximum time for a session. See\nhttp://hadoop.apache.org/zookeeper/docs/current/zookeeperProgrammers.html#ch_zkSessions\n\"The client sends a requested timeout, the server responds with the timeout that it\ncan give the client. \" In milliseconds.",
- "default": "180000",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/zookeeper/znode_parent": {
- "display_name": "",
- "description": "zookeeper.znode.parent (default \"/hbase\") -- Root ZNode for HBase in\nZooKeeper. All of HBase's ZooKeeper files that are configured with a\nrelative path will go under this node. By default, all of HBase's ZooKeeper\nfile path are configured with a relative path, so they will all go under\nthis directory unless changed.",
- "default": "/hbase",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/zookeeper/znode_rootserver": {
- "display_name": "",
- "description": "zookeeper.znode.rootserver (default root-region-server) -- Path to ZNode\nholding root region location. This is written by the master and read by\nclients and region servers. If a relative path is given, the parent folder\nwill be ${zookeeper.znode.parent}. By default, this means the root location\nis stored at /hbase/root-region-server.",
- "default": "root-region-server",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/zookeeper/max_client_connections": {
- "display_name": "",
- "description": "hbase.zookeeper.property.maxClientCnxns (default 2000) -- Limit on number of concurrent\nconnections (at the socket level) that a single client, identified by IP address, may\nmake to a single member of the ZooKeeper ensemble. Set high to avoid zk connection\nissues running standalone and pseudo-distributed.",
- "default": "2000",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/client/write_buffer": {
- "display_name": "",
- "description": "hbase.client.write.buffer (default 2097152) Default size of the HTable client write\nbuffer in bytes. A bigger buffer takes more memory -- on both the client and server\nside since server instantiates the passed write buffer to process it -- but a larger\nbuffer size reduces the number of RPCs made. For an estimate of server-side\nmemory-used, evaluate\nhbase.client.write.buffer * hbase.regionserver.handler.count",
- "default": "2097152",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/client/pause_period_ms": {
- "display_name": "",
- "description": "hbase.client.pause (default 1000) -- General client pause value. Used mostly as value\nto wait before running a retry of a failed get, region lookup, etc.",
- "default": "1000",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/client/retry_count": {
- "display_name": "",
- "description": "hbase.client.retries.number (default 10) -- Maximum retries. Used as maximum for all\nretryable operations such as fetching of the root region from root region server,\ngetting a cell's value, starting a row update, etc.",
- "default": "10",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/client/scanner_prefetch_rows": {
- "display_name": "",
- "description": "hbase.client.scanner.caching (default 1) -- Number of rows that will be fetched when\ncalling next on a scanner if it is not served from (local, client) memory. Higher\ncaching values will enable faster scanners but will eat up more memory and some calls\nof next may take longer and longer times when the cache is empty. Do not set this\nvalue such that the time between invocations is greater than the scanner timeout;\ni.e. hbase.regionserver.lease.period",
- "default": "1",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/client/max_keyvalue_size": {
- "display_name": "",
- "description": "hbase.client.keyvalue.maxsize (default 10485760) -- Specifies the combined maximum\nallowed size of a KeyValue instance. This is to set an upper boundary for a single\nentry saved in a storage file. Since they cannot be split it helps avoiding that a\nregion cannot be split any further because the data is too large. It seems wise to set\nthis to a fraction of the maximum region size. Setting it to zero or less disables the\ncheck.",
- "default": "10485760",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/memstore/flush_upper_heap_pct": {
- "display_name": "",
- "description": "hbase.regionserver.global.memstore.upperLimit (default 0.4) -- Maximum size of all\nmemstores in a region server before new updates are blocked and flushes are\nforced. Defaults to 40% of heap",
- "default": "0.4",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/memstore/flush_lower_heap_pct": {
- "display_name": "",
- "description": "hbase.regionserver.global.memstore.lowerLimit (default 0.35) -- When memstores are being\nforced to flush to make room in memory, keep flushing until we hit this mark. Defaults\nto 35% of heap. This value equal to hbase.regionserver.global.memstore.upperLimit\ncauses the minimum possible flushing to occur when updates are blocked due to memstore\nlimiting.",
- "default": "0.35",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/memstore/flush_size_trigger": {
- "display_name": "",
- "description": "hbase.hregion.memstore.flush.size (default 67108864) -- Memstore will be flushed to disk\nif size of the memstore exceeds this number of bytes. Value is checked by a thread\nthat runs every hbase.server.thread.wakefrequency.",
- "default": "67108864",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/memstore/preflush_trigger": {
- "display_name": "",
- "description": "hbase.hregion.preclose.flush.size (default 5 mb) -- If the memstores in a region are\nthis size or larger when we go to close, run a \"pre-flush\" to clear out memstores\nbefore we put up the region closed flag and take the region offline. On close, a\nflush is run under the close flag to empty memory. During this time the region is\noffline and we are not taking on any writes. If the memstore content is large, this\nflush could take a long time to complete. The preflush is meant to clean out the bulk\nof the memstore before putting up the close flag and taking the region offline so the\nflush that runs under the close flag has little to do.",
- "default": "5242880",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/memstore/flush_stall_trigger": {
- "display_name": "",
- "description": "hbase.hregion.memstore.block.multiplier (default 2) -- Block updates if memstore has\nhbase.hregion.block.memstore time hbase.hregion.flush.size bytes. Useful preventing\nrunaway memstore during spikes in update traffic. Without an upper-bound, memstore\nfills such that when it flushes the resultant flush files take a long time to compact\nor split, or worse, we OOME.",
- "default": "8",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/memstore/mslab_enabled": {
- "display_name": "",
- "description": "hbase.hregion.memstore.mslab.enabled (default false) -- Experimental: Enables the\nMemStore-Local Allocation Buffer, a feature which works to prevent heap fragmentation\nunder heavy write loads. This can reduce the frequency of stop-the-world GC pauses on\nlarge heaps.",
- "default": "",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/compaction/files_trigger": {
- "display_name": "",
- "description": "hbase.hstore.compactionThreshold (default 3) -- If more than this number of HStoreFiles\nin any one HStore (one HStoreFile is written per flush of memstore) then a compaction\nis run to rewrite all HStoreFiles files as one. Larger numbers put off compaction but\nwhen it runs, it takes longer to complete.",
- "default": "3",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/compaction/pause_trigger": {
- "display_name": "",
- "description": "hbase.hstore.blockingStoreFiles (default 7) -- If more than this number of StoreFiles in\nany one Store (one StoreFile is written per flush of MemStore) then updates are\nblocked for this HRegion until a compaction is completed, or until\nhbase.hstore.blockingWaitTime has been exceeded.",
- "default": "7",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/compaction/pause_time": {
- "display_name": "",
- "description": "hbase.hstore.blockingWaitTime (default 90_000) -- The time an HRegion will block updates\nfor after hitting the StoreFile limit defined by hbase.hstore.blockingStoreFiles.\nAfter this time has elapsed, the HRegion will stop blocking updates even if a\ncompaction has not been completed. Default: 90 seconds.",
- "default": "90000",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/compaction/max_combine_files": {
- "display_name": "",
- "description": "hbase.hstore.compaction.max (default 10) -- Max number of HStoreFiles to compact per\n'minor' compaction.",
- "default": "10",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "hbase/compaction/period": {
- "display_name": "",
- "description": "hbase.hregion.majorcompaction (default 86400000) -- The time (in miliseconds) between\n'major' compactions of all HStoreFiles in a region. Default: 1 day. Set to 0 to\ndisable automated major compactions.",
- "default": "86400000",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "users/hbase/uid": {
- "display_name": "",
- "description": "",
- "default": "304",
- "choice": [
-
- ],
- "calculated": false,
- "type": "string",
- "required": "optional",
- "recipes": [
-
- ]
- },
- "tuning/ulimit/hbase": {
- "display_name": "",
- "description": "",
- "type": "hash",
- "default": {
- "nofile": {
- "both": 32768
- },
- "nproc": {
- "both": 50000
- }
- },
- "choice": [
-
- ],
- "calculated": false,
- "required": "optional",
- "recipes": [
-
- ]
- }
- },
- "groupings": {
- },
- "recipes": {
- "hbase::backup_tables": "Cron job to backup tables to S3",
- "hbase::default": "Base configuration for hbase",
- "hbase::master": "HBase Master",
- "hbase::regionserver": "HBase Regionserver",
- "hbase::stargate": "HBase Stargate: HTTP frontend to HBase",
- "hbase::thrift": "HBase Thrift Listener",
- "hbase::dashboard": "Simple dashboard for HBase config and state",
- "hbase::config": "Finalizes the config, writes out the config files"
- },
- "version": "3.0.4"
-}
\ No newline at end of file
diff --git a/lc-gdn-chef/cookbooks/hbase_cluster/metadata.rb b/lc-gdn-chef/cookbooks/hbase_cluster/metadata.rb
deleted file mode 100644
index 68a0e9602dbfa344beed5c80f13dcef0b3c96074..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/hbase_cluster/metadata.rb
+++ /dev/null
@@ -1,389 +0,0 @@
-maintainer "Chris Howe - Infochimps, Inc"
-maintainer_email "coders@infochimps.com"
-license "Apache 2.0"
-long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
-version "3.0.4"
-
-description "HBase: a massively-scalable high-throughput datastore based on the Hadoop HDFS"
-
-depends "java"
-depends "apt"
-depends "runit"
-depends "volumes"
-depends "metachef"
-depends "dashpot"
-depends "hadoop_cluster"
-depends "zookeeper"
-depends "ganglia"
-
-recipe "hbase::backup_tables", "Cron job to backup tables to S3"
-recipe "hbase::default", "Base configuration for hbase"
-recipe "hbase::master", "HBase Master"
-recipe "hbase::regionserver", "HBase Regionserver"
-recipe "hbase::stargate", "HBase Stargate: HTTP frontend to HBase"
-recipe "hbase::thrift", "HBase Thrift Listener"
-recipe "hbase::dashboard", "Simple dashboard for HBase config and state"
-recipe "hbase::config", "Finalizes the config, writes out the config files"
-
-%w[ debian ubuntu ].each do |os|
- supports os
-end
-
-attribute "groups/hbase/gid",
- :display_name => "",
- :description => "",
- :default => "304"
-
-attribute "hbase/tmp_dir",
- :display_name => "",
- :description => "",
- :default => "/mnt/hbase/tmp"
-
-attribute "hbase/home_dir",
- :display_name => "",
- :description => "",
- :default => "/usr/lib/hbase"
-
-attribute "hbase/conf_dir",
- :display_name => "",
- :description => "",
- :default => "/etc/hbase/conf"
-
-attribute "hbase/log_dir",
- :display_name => "",
- :description => "",
- :default => "/var/log/hbase"
-
-attribute "hbase/pid_dir",
- :display_name => "",
- :description => "",
- :default => "/var/run/hbase"
-
-attribute "hbase/weekly_backup_tables",
- :display_name => "",
- :description => "",
- :default => ""
-
-attribute "hbase/backup_location",
- :display_name => "",
- :description => "",
- :default => "/mnt/hbase/bkup"
-
-attribute "hbase/master/java_heap_size_max",
- :display_name => "",
- :description => "total size of the JVM heap (master)",
- :default => "1000m"
-
-attribute "hbase/master/java_heap_size_new",
- :display_name => "",
- :description => "size of the JVM \"New Generation/Eden\" heap segment (master)",
- :default => "256m"
-
-attribute "hbase/master/gc_tuning_opts",
- :display_name => "",
- :description => "JVM garbage collection tuning for the hbase master",
- :default => "-XX:+UseConcMarkSweepGC -XX:+CMSIncrementalMode -XX:+AggressiveOpts"
-
-attribute "hbase/master/gc_log_opts",
- :display_name => "",
- :description => "What details to log about JVM garbage collection statistics for the hbase master",
- :default => "-verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps"
-
-attribute "hbase/master/run_state",
- :display_name => "",
- :description => "",
- :type => "array",
- :default => "start"
-
-attribute "hbase/master/port",
- :display_name => "",
- :description => "",
- :default => "60000"
-
-attribute "hbase/master/dash_port",
- :display_name => "",
- :description => "",
- :default => "60010"
-
-attribute "hbase/master/jmx_dash_port",
- :display_name => "",
- :description => "",
- :default => "10101"
-
-attribute "hbase/regionserver/java_heap_size_max",
- :display_name => "",
- :description => "total size of the JVM heap (regionserver)",
- :default => "2000m"
-
-attribute "hbase/regionserver/java_heap_size_new",
- :display_name => "",
- :description => "size of the JVM \"New Generation/Eden\" heap segment (regionserver)",
- :default => "256m"
-
-attribute "hbase/regionserver/gc_tuning_opts",
- :display_name => "",
- :description => "JVM garbage collection tuning for the hbase regionserver",
- :default => "-XX:+UseConcMarkSweepGC -XX:+CMSIncrementalMode -XX:+AggressiveOpts -XX:CMSInitiatingOccupancyFraction=88"
-
-attribute "hbase/regionserver/gc_log_opts",
- :display_name => "",
- :description => "What details to log about JVM garbage collection statistics for the hbase regionserver",
- :default => "-verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps"
-
-attribute "hbase/regionserver/run_state",
- :display_name => "",
- :description => "",
- :type => "array",
- :default => "start"
-
-attribute "hbase/regionserver/port",
- :display_name => "",
- :description => "",
- :default => "60020"
-
-attribute "hbase/regionserver/dash_port",
- :display_name => "",
- :description => "",
- :default => "60030"
-
-attribute "hbase/regionserver/jmx_dash_port",
- :display_name => "",
- :description => "",
- :default => "10102"
-
-attribute "hbase/regionserver/lease_period",
- :display_name => "",
- :description => "hbase.regionserver.lease.period (default 60000) -- HRegion server lease period in\nmilliseconds. Default is 60 seconds. Clients must report in within this period else\nthey are considered dead.",
- :default => "60000"
-
-attribute "hbase/regionserver/handler_count",
- :display_name => "",
- :description => "hbase.regionserver.handler.count (default 10) -- Count of RPC Server instances spun up\non RegionServers. Same property is used by the Master for count of master handlers.",
- :default => "10"
-
-attribute "hbase/regionserver/split_limit",
- :display_name => "",
- :description => "hbase.regionserver.regionSplitLimit (default 2147483647) -- Limit for the number of\nregions after which no more region splitting should take place. This is not a hard\nlimit for the number of regions but acts as a guideline for the regionserver to stop\nsplitting after a certain limit. Default is set to MAX_INT; i.e. do not block\nsplitting.",
- :default => "2147483647"
-
-attribute "hbase/regionserver/msg_period",
- :display_name => "",
- :description => "hbase.regionserver.msginterval (default 3000) -- Interval between messages from the\nRegionServer to Master in milliseconds.",
- :default => "3000"
-
-attribute "hbase/regionserver/log_flush_period",
- :display_name => "",
- :description => "hbase.regionserver.optionallogflushinterval (default 1000) -- Sync the HLog to the HDFS\nafter this interval if it has not accumulated enough entries to trigger a\nsync. Default 1 second. Units: milliseconds.",
- :default => "1000"
-
-attribute "hbase/regionserver/logroll_period",
- :display_name => "",
- :description => "hbase.regionserver.logroll.period (default 3600000) -- Period at which we will roll the\ncommit log regardless of how many edits it has.",
- :default => "3600000"
-
-attribute "hbase/regionserver/split_check_period",
- :display_name => "",
- :description => "hbase.regionserver.thread.splitcompactcheckfrequency (default 20000) -- How often a\nregion server runs the split/compaction check.",
- :default => "20000"
-
-attribute "hbase/regionserver/worker_period",
- :display_name => "",
- :description => "hbase.server.thread.wakefrequency (default 10_000) -- Time to sleep in between searches\nfor work (in milliseconds). Used as sleep interval by service threads such as log roller.",
- :default => "10000"
-
-attribute "hbase/regionserver/balancer_period",
- :display_name => "",
- :description => "hbase.balancer.period (default 300000) -- Period at which the region balancer runs in\nthe Master.",
- :default => "300000"
-
-attribute "hbase/regionserver/balancer_slop",
- :display_name => "",
- :description => "hbase.regions.slop (default 0) -- Rebalance if any regionserver has more than\naverage + (average * slop) regions",
- :default => "0"
-
-attribute "hbase/regionserver/max_filesize",
- :display_name => "",
- :description => "hbase.hregion.max.filesize (default 268435456) -- Maximum HStoreFile size. If any one of\na column families' HStoreFiles has grown to exceed this value, the hosting HRegion is\nsplit in two.",
- :default => "268435456"
-
-attribute "hbase/regionserver/hfile_block_size",
- :display_name => "",
- :description => "hbase.mapreduce.hfileoutputformat.blocksize (default 65536) -- The mapreduce\nHFileOutputFormat writes storefiles/hfiles. This is the minimum hfile blocksize to\nemit. Usually in hbase, writing hfiles, the blocksize is gotten from the table schema\n(HColumnDescriptor) but in the mapreduce outputformat context, we don't have access to\nthe schema so get blocksize from Configuation. The smaller you make the blocksize,\nthe bigger your index and the less you fetch on a random-access. Set the blocksize\ndown if you have small cells and want faster random-access of individual cells.",
- :default => "65536"
-
-attribute "hbase/regionserver/required_codecs",
- :display_name => "",
- :description => "hbase.regionserver.codecs (default \"\") -- To have a RegionServer test a set of\ncodecs and fail-to-start if any code is missing or misinstalled, add the\nconfiguration hbase.regionserver.codecs to your hbase-site.xml with a value of\ncodecs to test on startup. For example if the hbase.regionserver.codecs value\nis \"lzo,gz\" and if lzo is not present or improperly installed, the misconfigured\nRegionServer will fail to start.",
- :default => ""
-
-attribute "hbase/regionserver/block_cache_size",
- :display_name => "",
- :description => "hfile.block.cache.size (default 0.2) -- Percentage of maximum heap (-Xmx setting) to\nallocate to block cache used by HFile/StoreFile. Default of 0.2 means allocate 20%.\nSet to 0 to disable.",
- :default => "0.2"
-
-attribute "hbase/regionserver/hash_type",
- :display_name => "",
- :description => "hbase.hash.type (default murmur) -- The hashing algorithm for use in HashFunction. Two\nvalues are supported now: murmur (MurmurHash) and jenkins (JenkinsHash). Used by\nbloom filters.",
- :default => "murmur"
-
-attribute "hbase/stargate/run_state",
- :display_name => "",
- :description => "",
- :type => "array",
- :default => "start"
-
-attribute "hbase/stargate/port",
- :display_name => "",
- :description => "",
- :default => "8080"
-
-attribute "hbase/stargate/jmx_dash_port",
- :display_name => "",
- :description => "",
- :default => "10105"
-
-attribute "hbase/stargate/readonly",
- :display_name => "",
- :description => "hbase.rest.readonly (default false) -- Defines the mode the REST server will be started\nin. Possible values are: false: All HTTP methods are permitted - GET/PUT/POST/DELETE.\ntrue: Only the GET method is permitted.",
- :default => ""
-
-attribute "hbase/thrift/run_state",
- :display_name => "",
- :description => "",
- :default => "start"
-
-attribute "hbase/thrift/jmx_dash_port",
- :display_name => "",
- :description => "",
- :default => "10104"
-
-attribute "hbase/zookeeper/jmx_dash_port",
- :display_name => "",
- :description => "",
- :default => "10103"
-
-attribute "hbase/zookeeper/peer_port",
- :display_name => "",
- :description => "",
- :default => "2888"
-
-attribute "hbase/zookeeper/leader_port",
- :display_name => "",
- :description => "",
- :default => "3888"
-
-attribute "hbase/zookeeper/client_port",
- :display_name => "",
- :description => "",
- :default => "2181"
-
-attribute "hbase/zookeeper/session_timeout",
- :display_name => "",
- :description => "zookeeper.session.timeout (default 180_000) -- ZooKeeper session timeout. HBase passes\nthis to the zk quorum as suggested maximum time for a session. See\nhttp://hadoop.apache.org/zookeeper/docs/current/zookeeperProgrammers.html#ch_zkSessions\n\"The client sends a requested timeout, the server responds with the timeout that it\ncan give the client. \" In milliseconds.",
- :default => "180000"
-
-attribute "hbase/zookeeper/znode_parent",
- :display_name => "",
- :description => "zookeeper.znode.parent (default \"/hbase\") -- Root ZNode for HBase in\nZooKeeper. All of HBase's ZooKeeper files that are configured with a\nrelative path will go under this node. By default, all of HBase's ZooKeeper\nfile path are configured with a relative path, so they will all go under\nthis directory unless changed.",
- :default => "/hbase"
-
-attribute "hbase/zookeeper/znode_rootserver",
- :display_name => "",
- :description => "zookeeper.znode.rootserver (default root-region-server) -- Path to ZNode\nholding root region location. This is written by the master and read by\nclients and region servers. If a relative path is given, the parent folder\nwill be ${zookeeper.znode.parent}. By default, this means the root location\nis stored at /hbase/root-region-server.",
- :default => "root-region-server"
-
-attribute "hbase/zookeeper/max_client_connections",
- :display_name => "",
- :description => "hbase.zookeeper.property.maxClientCnxns (default 2000) -- Limit on number of concurrent\nconnections (at the socket level) that a single client, identified by IP address, may\nmake to a single member of the ZooKeeper ensemble. Set high to avoid zk connection\nissues running standalone and pseudo-distributed.",
- :default => "2000"
-
-attribute "hbase/client/write_buffer",
- :display_name => "",
- :description => "hbase.client.write.buffer (default 2097152) Default size of the HTable client write\nbuffer in bytes. A bigger buffer takes more memory -- on both the client and server\nside since server instantiates the passed write buffer to process it -- but a larger\nbuffer size reduces the number of RPCs made. For an estimate of server-side\nmemory-used, evaluate\nhbase.client.write.buffer * hbase.regionserver.handler.count",
- :default => "2097152"
-
-attribute "hbase/client/pause_period_ms",
- :display_name => "",
- :description => "hbase.client.pause (default 1000) -- General client pause value. Used mostly as value\nto wait before running a retry of a failed get, region lookup, etc.",
- :default => "1000"
-
-attribute "hbase/client/retry_count",
- :display_name => "",
- :description => "hbase.client.retries.number (default 10) -- Maximum retries. Used as maximum for all\nretryable operations such as fetching of the root region from root region server,\ngetting a cell's value, starting a row update, etc.",
- :default => "10"
-
-attribute "hbase/client/scanner_prefetch_rows",
- :display_name => "",
- :description => "hbase.client.scanner.caching (default 1) -- Number of rows that will be fetched when\ncalling next on a scanner if it is not served from (local, client) memory. Higher\ncaching values will enable faster scanners but will eat up more memory and some calls\nof next may take longer and longer times when the cache is empty. Do not set this\nvalue such that the time between invocations is greater than the scanner timeout;\ni.e. hbase.regionserver.lease.period",
- :default => "1"
-
-attribute "hbase/client/max_keyvalue_size",
- :display_name => "",
- :description => "hbase.client.keyvalue.maxsize (default 10485760) -- Specifies the combined maximum\nallowed size of a KeyValue instance. This is to set an upper boundary for a single\nentry saved in a storage file. Since they cannot be split it helps avoiding that a\nregion cannot be split any further because the data is too large. It seems wise to set\nthis to a fraction of the maximum region size. Setting it to zero or less disables the\ncheck.",
- :default => "10485760"
-
-attribute "hbase/memstore/flush_upper_heap_pct",
- :display_name => "",
- :description => "hbase.regionserver.global.memstore.upperLimit (default 0.4) -- Maximum size of all\nmemstores in a region server before new updates are blocked and flushes are\nforced. Defaults to 40% of heap",
- :default => "0.4"
-
-attribute "hbase/memstore/flush_lower_heap_pct",
- :display_name => "",
- :description => "hbase.regionserver.global.memstore.lowerLimit (default 0.35) -- When memstores are being\nforced to flush to make room in memory, keep flushing until we hit this mark. Defaults\nto 35% of heap. This value equal to hbase.regionserver.global.memstore.upperLimit\ncauses the minimum possible flushing to occur when updates are blocked due to memstore\nlimiting.",
- :default => "0.35"
-
-attribute "hbase/memstore/flush_size_trigger",
- :display_name => "",
- :description => "hbase.hregion.memstore.flush.size (default 67108864) -- Memstore will be flushed to disk\nif size of the memstore exceeds this number of bytes. Value is checked by a thread\nthat runs every hbase.server.thread.wakefrequency.",
- :default => "67108864"
-
-attribute "hbase/memstore/preflush_trigger",
- :display_name => "",
- :description => "hbase.hregion.preclose.flush.size (default 5 mb) -- If the memstores in a region are\nthis size or larger when we go to close, run a \"pre-flush\" to clear out memstores\nbefore we put up the region closed flag and take the region offline. On close, a\nflush is run under the close flag to empty memory. During this time the region is\noffline and we are not taking on any writes. If the memstore content is large, this\nflush could take a long time to complete. The preflush is meant to clean out the bulk\nof the memstore before putting up the close flag and taking the region offline so the\nflush that runs under the close flag has little to do.",
- :default => "5242880"
-
-attribute "hbase/memstore/flush_stall_trigger",
- :display_name => "",
- :description => "hbase.hregion.memstore.block.multiplier (default 2) -- Block updates if memstore has\nhbase.hregion.block.memstore time hbase.hregion.flush.size bytes. Useful preventing\nrunaway memstore during spikes in update traffic. Without an upper-bound, memstore\nfills such that when it flushes the resultant flush files take a long time to compact\nor split, or worse, we OOME.",
- :default => "8"
-
-attribute "hbase/memstore/mslab_enabled",
- :display_name => "",
- :description => "hbase.hregion.memstore.mslab.enabled (default false) -- Experimental: Enables the\nMemStore-Local Allocation Buffer, a feature which works to prevent heap fragmentation\nunder heavy write loads. This can reduce the frequency of stop-the-world GC pauses on\nlarge heaps.",
- :default => ""
-
-attribute "hbase/compaction/files_trigger",
- :display_name => "",
- :description => "hbase.hstore.compactionThreshold (default 3) -- If more than this number of HStoreFiles\nin any one HStore (one HStoreFile is written per flush of memstore) then a compaction\nis run to rewrite all HStoreFiles files as one. Larger numbers put off compaction but\nwhen it runs, it takes longer to complete.",
- :default => "3"
-
-attribute "hbase/compaction/pause_trigger",
- :display_name => "",
- :description => "hbase.hstore.blockingStoreFiles (default 7) -- If more than this number of StoreFiles in\nany one Store (one StoreFile is written per flush of MemStore) then updates are\nblocked for this HRegion until a compaction is completed, or until\nhbase.hstore.blockingWaitTime has been exceeded.",
- :default => "7"
-
-attribute "hbase/compaction/pause_time",
- :display_name => "",
- :description => "hbase.hstore.blockingWaitTime (default 90_000) -- The time an HRegion will block updates\nfor after hitting the StoreFile limit defined by hbase.hstore.blockingStoreFiles.\nAfter this time has elapsed, the HRegion will stop blocking updates even if a\ncompaction has not been completed. Default: 90 seconds.",
- :default => "90000"
-
-attribute "hbase/compaction/max_combine_files",
- :display_name => "",
- :description => "hbase.hstore.compaction.max (default 10) -- Max number of HStoreFiles to compact per\n'minor' compaction.",
- :default => "10"
-
-attribute "hbase/compaction/period",
- :display_name => "",
- :description => "hbase.hregion.majorcompaction (default 86400000) -- The time (in miliseconds) between\n'major' compactions of all HStoreFiles in a region. Default: 1 day. Set to 0 to\ndisable automated major compactions.",
- :default => "86400000"
-
-attribute "users/hbase/uid",
- :display_name => "",
- :description => "",
- :default => "304"
-
-attribute "tuning/ulimit/hbase",
- :display_name => "",
- :description => "",
- :type => "hash",
- :default => {:nofile=>{:both=>32768}, :nproc=>{:both=>50000}}
diff --git a/lc-gdn-chef/cookbooks/hbase_cluster/recipes/backup_tables.rb b/lc-gdn-chef/cookbooks/hbase_cluster/recipes/backup_tables.rb
deleted file mode 100644
index 5d51f82a6b993744aa14a4fe0747cb9bbc996aeb..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/hbase_cluster/recipes/backup_tables.rb
+++ /dev/null
@@ -1,29 +0,0 @@
-#
-# Cookbook Name:: hbase
-# Description:: Cron job to backup tables to S3
-# Recipe:: backup_tables
-# Author:: Chris Howe - Infochimps, Inc
-#
-# Copyright 2011, Chris Howe - Infochimps, Inc
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-include_recipe "hbase"
-
-template "/etc/cron.weekly/backup_hbase_tables" do
- source "export_hbase_tables.rb.erb"
- mode "0744"
- variables( :hbase => node[:hbase] )
-end
-
diff --git a/lc-gdn-chef/cookbooks/hbase_cluster/recipes/config.rb b/lc-gdn-chef/cookbooks/hbase_cluster/recipes/config.rb
deleted file mode 100644
index c09a01f41318d215871074541a9f1d81f458a6a3..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/hbase_cluster/recipes/config.rb
+++ /dev/null
@@ -1,91 +0,0 @@
-#
-# Cookbook Name:: hbase
-# Description:: Finalizes the config, writes out the config files
-# Recipe:: config
-# Author:: Philip (flip) Kromer - Infochimps, Inc
-#
-# Copyright 2011, Philip (flip) Kromer - Infochimps, Inc
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-include_recipe "hbase"
-
-#
-# Configuration files
-#
-hbase_config = Mash.new({
- :namenode_fqdn => (discover(:hadoop, :namenode) && discover(:hadoop, :namenode ).private_hostname),
- :jobtracker_addr => (discover(:hadoop, :jobtracker) && discover(:hadoop, :jobtracker).private_ip),
- :zookeeper_addrs => discover_all(:zookeeper, :server).map(&:private_ip).sort,
- :ganglia => discover(:ganglia, :server),
- :ganglia_addr => (discover(:ganglia, :server) && discover(:ganglia, :server).private_hostname),
- :private_ip => private_ip_of(node),
- :ganglia_port => 8649,
- :period => 10
- })
-
-
-[:hadoop].each do |component|
- next unless node[component]
- Array(node[component][:exported_jars]).flatten.each do |export|
- link "#{node[:hbase][:home_dir]}/lib/#{File.basename(export)}" do
- to export
- end
- end
-
-
-
- # FIXME: this is brittle, but the best I can do. can you do better?
- Array(node[component][:exported_libs]).flatten.each do |export|
- pathsegs = export.gsub(%r{\A.*/native/([\w\.\-]+)/([^/]+)\z}, '\1/\2')
- Chef::Log.info( [ 'hbase using hadoop native libs', pathsegs, node[:hadoop][:exported_libs], node[:hbase][:home_dir] ].inspect )
- link "#{node[:hbase][:home_dir]}/lib/native/#{pathsegs}" do
- to export
- end
- end
-end
-
-
-%w[ hbase-env.sh hbase-site.xml hadoop-metrics.properties ].each do |conf_file|
- template "#{node[:hbase][:conf_dir]}/#{conf_file}" do
- owner "root"
- mode "0644"
- source "#{conf_file}.erb"
- variables(hbase_config.merge(:hbase => node[:hbase]))
- notify_startable_services(:hbase, node[:hbase][:services])
- end
-end
-
-template "/etc/default/hbase" do
- owner "root"
- mode "0644"
- source "etc_default_hbase.erb"
- variables(hbase_config.merge(:hbase => node[:hbase]))
- notify_startable_services(:hbase, node[:hbase][:services])
-end
-
-template "#{node[:hbase][:home_dir]}/bin/hbase" do
- owner "root"
- mode "0755"
- source "bin-hbase.erb"
- variables(hbase_config.merge(:hbase => node[:hbase]))
- notify_startable_services(:hbase, node[:hbase][:services])
-end
-
-if node[:hadoop] && node[:hadoop][:conf_dir]
- link "#{node[:hadoop][:conf_dir]}/hbase-site.xml" do
- to "#{node[:hbase][:conf_dir]}/hbase-site.xml"
- only_if{ File.exists?(node[:hadoop][:conf_dir]) }
- end
-end
diff --git a/lc-gdn-chef/cookbooks/hbase_cluster/recipes/dashboard.rb b/lc-gdn-chef/cookbooks/hbase_cluster/recipes/dashboard.rb
deleted file mode 100644
index 692d8ab8b70c7ba69dfdef092e1e9fac9cf2be72..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/hbase_cluster/recipes/dashboard.rb
+++ /dev/null
@@ -1,88 +0,0 @@
-#
-# Cookbook Name:: hbase
-# Description:: Simple dashboard for HBase config and state
-# Recipe:: dashboard
-# Author:: Philip (flip) Kromer - Infochimps, Inc
-#
-# Copyright 2011, Philip (flip) Kromer - Infochimps, Inc
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-include_recipe 'dashpot'
-
-#
-# Register dashboard
-#
-
-%w[ master regionserver stargate ].each do |component|
- info = node[:hbase][component]
- next unless info && (info[:run_state].to_s == 'start') && info[:dash_port]
- hsh = { "addr" => private_ip_of(node) }.merge(info)
- add_dashboard_link("hbase.#{component}", hsh)
-end
-
-#
-# Drop in our mini-dashboard
-#
-
-dashpot_dashboard(:hbase) do
- summary_keys = %w[
- ==Daemons
- hadoop.namenode.addr hadoop.namenode.port hadoop.jobtracker.addr hadoop.jobtracker.port
- hbase.master.addr hbase.master.port hbase.master.dash_port
- hbase.regionserver.addr hbase.regionserver.port
- public_ip fqdn cloud.private_ips cloud.public_ips
- ----
- hadoop.namenode.run_state
- hadoop.secondarynn.run_state
- hadoop.jobtracker.run_state
- hadoop.datanode.run_state
- hadoop.tasktracker.run_state
- ==Tuning
- hadoop.max_map_tasks hadoop.max_reduce_tasks
- hadoop.java_child_opts hadoop.java_child_ulimit hadoop.io_sort_factor hadoop.io_sort_mb
- cpu.total memory.total ec2.instance_type
- ----
- hadoop.namenode.handler_count
- hadoop.jobtracker.handler_count
- hadoop.datanode.handler_count
- hadoop.tasktracker.http_threads
- hadoop.reducer_parallel_copies
- ----
- hadoop.namenode.java_heap_size_max
- hadoop.secondarynn.java_heap_size_max
- hadoop.jobtracker.java_heap_size_max
- hadoop.datanode.java_heap_size_max
- hadoop.tasktracker.java_heap_size_max
- ==MapReduce
- hadoop.compress_output
- hadoop.compress_output_type
- hadoop.compress_output_codec
- hadoop.compress_mapout
- hadoop.compress_mapout_codec
- ==HDFS
- hadoop.min_split_size hadoop.s3_block_size hadoop.hdfs_block_size hadoop.dfs_replication
- ==Install
- hadoop.home_dir hadoop.pid_dir hadoop.tmp_dir hadoop.log_dir
- hadoop.namenode.data_dirs
- hadoop.secondarynn.data_dirs
- hadoop.datanode.data_dirs
- hadoop.tasktracker.scratch_dirs
- ----
- apt.cloudera.release_name hadoop.deb_version
- ]
-
- action :create
- variables Mash.new(:summary_keys => summary_keys).merge(node[:hbase])
-end
diff --git a/lc-gdn-chef/cookbooks/hbase_cluster/recipes/default.rb b/lc-gdn-chef/cookbooks/hbase_cluster/recipes/default.rb
deleted file mode 100644
index cbc78a752a8238c8d6d705c50d3aeb2de80eb5bb..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/hbase_cluster/recipes/default.rb
+++ /dev/null
@@ -1,59 +0,0 @@
-#
-# Cookbook Name:: hbase
-# Description:: Base configuration for hbase
-# Recipe:: default
-# Author:: Chris Howe - Infochimps, Inc
-#
-# Copyright 2011, Chris Howe - Infochimps, Inc
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-include_recipe "java::sun"
-include_recipe "apt"
-include_recipe "metachef"
-include_recipe "volumes"
-include_recipe "ganglia"
-
-include_recipe "hadoop_cluster"
-include_recipe "hadoop_cluster::add_cloudera_repo"
-include_recipe "zookeeper::client"
-
-#
-# Users
-#
-
-daemon_user(:hbase)
-
-# Install
-package "hadoop-hbase"
-
-standard_dirs('hbase') do
- directories :conf_dir, :pid_dir, :tmp_dir, :log_dir
-end
-
-node[:hbase][:services].each do |svc|
- directory("#{node[:hbase][:log_dir]}/#{svc}"){ action(:create) ; owner 'hbase' ; group 'hbase'; mode "0755" }
-end
-
-# JMX should listen on the public interface
-node[:hbase][:jmx_dash_addr] = public_ip_of(node)
-
-# FIXME: don't hardcode these...
-link("#{node[:hbase][:home_dir]}/hbase.jar" ){ to "hbase-0.90.4-cdh3u2.jar" }
-link("#{node[:hbase][:home_dir]}/hbase-tests.jar"){ to "hbase-0.90.4-cdh3u2-tests.jar" }
-
-# Stuff the HBase jars into the classpath
-node[:hadoop][:extra_classpaths][:hbase] = '/usr/lib/hbase/hbase.jar:/usr/lib/hbase/conf'
-node[:hbase][:exported_jars] = [ "#{node[:hbase][:home_dir]}/hbase.jar", "#{node[:hbase][:home_dir]}/hbase-tests.jar", ]
-node[:hbase][:exported_confs] = [ "#{node[:hbase][:conf_dir]}/hbase-default.xml", "#{node[:hbase][:conf_dir]}/hbase-site.xml",]
diff --git a/lc-gdn-chef/cookbooks/hbase_cluster/recipes/master.rb b/lc-gdn-chef/cookbooks/hbase_cluster/recipes/master.rb
deleted file mode 100644
index 57e908f66687dc189d52008b27236957f375f853..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/hbase_cluster/recipes/master.rb
+++ /dev/null
@@ -1,36 +0,0 @@
-#
-# Cookbook Name:: hbase
-# Description:: HBase Master
-# Recipe:: master
-# Author:: Chris Howe - Infochimps, Inc
-#
-# Copyright 2011, Chris Howe - Infochimps, Inc
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-include_recipe "hbase"
-include_recipe "runit"
-
-# Install
-package "hadoop-hbase-master"
-
-# Set up service
-runit_service "hbase_master" do
- run_state node[:hbase][:master][:run_state]
- options Mash.new(:service_name => 'master').merge(node[:hbase]).merge(node[:hbase][:master])
-end
-
-kill_old_service("hadoop-hbase-master"){ hard(:real_hard) ; only_if{ File.exists?("/etc/init.d/hadoop-hbase-master") } }
-
-announce(:hbase, :master)
diff --git a/lc-gdn-chef/cookbooks/hbase_cluster/recipes/regionserver.rb b/lc-gdn-chef/cookbooks/hbase_cluster/recipes/regionserver.rb
deleted file mode 100644
index a0fa6833642886609ab407ca759037879c12f9e2..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/hbase_cluster/recipes/regionserver.rb
+++ /dev/null
@@ -1,36 +0,0 @@
-#
-# Cookbook Name:: hbase
-# Description:: HBase Regionserver
-# Recipe:: regionserver
-# Author:: Chris Howe - Infochimps, Inc
-#
-# Copyright 2011, Chris Howe - Infochimps, Inc
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-include_recipe "hbase"
-include_recipe "runit"
-
-# Install
-package "hadoop-hbase-regionserver"
-
-# Set up service
-runit_service "hbase_regionserver" do
- run_state node[:hbase][:regionserver][:run_state]
- options Mash.new(:service_name => 'regionserver').merge(node[:hbase]).merge(node[:hbase][:regionserver])
-end
-
-kill_old_service("hadoop-hbase-regionserver"){ hard(:real_hard) ; only_if{ File.exists?("/etc/init.d/hadoop-hbase-regionserver") } }
-
-announce(:hbase, :regionserver)
diff --git a/lc-gdn-chef/cookbooks/hbase_cluster/recipes/stargate.rb b/lc-gdn-chef/cookbooks/hbase_cluster/recipes/stargate.rb
deleted file mode 100644
index 1aa2611f1d48e9e140dacebf768872a883184115..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/hbase_cluster/recipes/stargate.rb
+++ /dev/null
@@ -1,36 +0,0 @@
-#
-# Cookbook Name:: hbase
-# Description:: HBase Stargate: HTTP frontend to HBase
-# Recipe:: stargate
-# Author:: Chris Howe - Infochimps, Inc
-#
-# Copyright 2011, Chris Howe - Infochimps, Inc
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-include_recipe "hbase"
-include_recipe "runit"
-
-
-
-
-# Set up service
-runit_service "hbase_stargate" do
- run_state node[:hbase][:stargate][:run_state]
- options Mash.new(:service_name => 'stargate', :command_name => 'rest').merge(node[:hbase]).merge(node[:hbase][:stargate])
-end
-
-kill_old_service("hadoop-hbase-stargate"){ hard(:real_hard) ; only_if{ File.exists?("/etc/init.d/hadoop-hbase-stargate") } }
-
-announce(:hbase, :stargate)
diff --git a/lc-gdn-chef/cookbooks/hbase_cluster/recipes/thrift.rb b/lc-gdn-chef/cookbooks/hbase_cluster/recipes/thrift.rb
deleted file mode 100644
index e758dfdde596ba4d81ae5799bde326b52643418c..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/hbase_cluster/recipes/thrift.rb
+++ /dev/null
@@ -1,36 +0,0 @@
-#
-# Cookbook Name:: hbase
-# Description:: HBase Thrift Listener
-# Recipe:: thrift
-# Author:: Philip (flip) Kromer - Infochimps, Inc
-#
-# Copyright 2011, Philip (flip) Kromer - Infochimps, Inc
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-include_recipe "hbase"
-include_recipe "runit"
-
-# Install
-package "hadoop-hbase-thrift"
-
-# Set up service
-runit_service "hbase_thrift" do
- run_state node[:hbase][:thrift][:run_state]
- options Mash.new(:service_name => 'thrift').merge(node[:hbase]).merge(node[:hbase][:thrift])
-end
-
-kill_old_service("hadoop-hbase-thrift"){ hard(:real_hard) ; only_if{ File.exists?("/etc/init.d/hadoop-hbase-thrift") } }
-
-announce(:hbase, :thrift)
diff --git a/lc-gdn-chef/cookbooks/hbase_cluster/templates/default/bin-hbase.erb b/lc-gdn-chef/cookbooks/hbase_cluster/templates/default/bin-hbase.erb
deleted file mode 100755
index 274700e90582be28aa101ca80ed5af182dea0db8..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/hbase_cluster/templates/default/bin-hbase.erb
+++ /dev/null
@@ -1,320 +0,0 @@
-#!/usr/bin/env bash
-#
-# Patched by chef: changes will be overwritten
-#
-# This patched version will use the system jruby if JRUBY_HOME is set,
-# and uses JRUBY_OPTS if JRUBY_OPTS is set
-#
-#/**
-# * Copyright 2007 The Apache Software Foundation
-# *
-# * Licensed to the Apache Software Foundation (ASF) under one
-# * or more contributor license agreements. See the NOTICE file
-# * distributed with this work for additional information
-# * regarding copyright ownership. The ASF licenses this file
-# * to you under the Apache License, Version 2.0 (the
-# * "License"); you may not use this file except in compliance
-# * with the License. You may obtain a copy of the License at
-# *
-# * http://www.apache.org/licenses/LICENSE-2.0
-# *
-# * Unless required by applicable law or agreed to in writing, software
-# * distributed under the License is distributed on an "AS IS" BASIS,
-# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# * See the License for the specific language governing permissions and
-# * limitations under the License.
-# */
-#
-# The hbase command script. Based on the hadoop command script putting
-# in hbase classes, libs and configurations ahead of hadoop's.
-#
-# TODO: Narrow the amount of duplicated code.
-#
-# Environment Variables:
-#
-# JAVA_HOME The java implementation to use. Overrides JAVA_HOME.
-#
-# HBASE_CLASSPATH Extra Java CLASSPATH entries.
-#
-# HBASE_HEAPSIZE The maximum amount of heap to use, in MB.
-# Default is 1000.
-#
-# HBASE_OPTS Extra Java runtime options.
-#
-# HBASE_CONF_DIR Alternate conf dir. Default is ${HBASE_HOME}/conf.
-#
-# HBASE_ROOT_LOGGER The root appender. Default is INFO,console
-#
-# MAVEN_HOME Where mvn is installed.
-#
-# JRUBY_HOME JRuby path: $JRUBY_HOME/lib/jruby.jar should exist.
-# Defaults to the jar packaged with HBase.
-#
-# JRUBY_OPTS Extra options (eg '--1.9') passed to the hbase shell.
-# Empty by default.
-#
-bin=`dirname "$0"`
-bin=`cd "$bin">/dev/null; pwd`
-
-# This will set HBASE_HOME, etc.
-. "$bin"/hbase-config.sh
-
-cygwin=false
-case "`uname`" in
-CYGWIN*) cygwin=true;;
-esac
-
-# Detect if we are in hbase sources dir
-in_dev_env=false
-if [ -d "${HBASE_HOME}/target" ]; then
- in_dev_env=true
-fi
-
-# if no args specified, show usage
-if [ $# = 0 ]; then
- echo "Usage: hbase "
- echo "where is one of:"
- echo " shell run the HBase shell"
- echo " zkcli run the ZooKeeper shell"
- echo " master run an HBase HMaster node"
- echo " regionserver run an HBase HRegionServer node"
- echo " zookeeper run a Zookeeper server"
- echo " rest run an HBase REST server"
- echo " thrift run an HBase Thrift server"
- echo " avro run an HBase Avro server"
- echo " migrate upgrade an hbase.rootdir"
- echo " hbck run the hbase 'fsck' tool"
- echo " classpath dump hbase CLASSPATH"
- echo " or"
- echo " CLASSNAME run the class named CLASSNAME"
- echo "Most commands print help when invoked w/o parameters."
- exit 1
-fi
-
-# get arguments
-COMMAND=$1
-shift
-
-JAVA=$JAVA_HOME/bin/java
-JAVA_HEAP_MAX=-Xmx1000m
-
-MVN="mvn"
-if [ "$MAVEN_HOME" != "" ]; then
- MVN=${MAVEN_HOME}/bin/mvn
-fi
-
-# check envvars which might override default args
-if [ "$HBASE_HEAPSIZE" != "" ]; then
- #echo "run with heapsize $HBASE_HEAPSIZE"
- JAVA_HEAP_MAX="-Xmx""$HBASE_HEAPSIZE""m"
- #echo $JAVA_HEAP_MAX
-fi
-
-# so that filenames w/ spaces are handled correctly in loops below
-IFS=
-
-# CLASSPATH initially contains $HBASE_CONF_DIR
-CLASSPATH="${HBASE_CONF_DIR}"
-CLASSPATH=${CLASSPATH}:$JAVA_HOME/lib/tools.jar
-
-add_maven_deps_to_classpath() {
- # Need to generate classpath from maven pom. This is costly so generate it
- # and cache it. Save the file into our target dir so a mvn clean will
- # clean it up and force us to create a new one.
- f="${HBASE_HOME}/target/cached_classpath.txt"
- if [ ! -f "${f}" ]
- then
- ${MVN} -f "${HBASE_HOME}/pom.xml" dependency:build-classpath -Dmdep.outputFile="${f}" &> /dev/null
- fi
- CLASSPATH=${CLASSPATH}:`cat "${f}"`
-}
-
-add_maven_main_classes_to_classpath() {
- if [ -d "$HBASE_HOME/target/classes" ]; then
- CLASSPATH=${CLASSPATH}:$HBASE_HOME/target/classes
- fi
-}
-
-add_maven_test_classes_to_classpath() {
- # For developers, add hbase classes to CLASSPATH
- f="$HBASE_HOME/target/test-classes"
- if [ -d "${f}" ]; then
- CLASSPATH=${CLASSPATH}:${f}
- fi
-}
-
-# Add maven target directory
-if $in_dev_env; then
- add_maven_deps_to_classpath
- add_maven_main_classes_to_classpath
- add_maven_test_classes_to_classpath
-fi
-
-# For releases, add hbase & webapps to CLASSPATH
-# Webapps must come first else it messes up Jetty
-if [ -d "$HBASE_HOME/hbase-webapps" ]; then
- CLASSPATH=${CLASSPATH}:$HBASE_HOME
-fi
-if [ -d "$HBASE_HOME/target/hbase-webapps" ]; then
- CLASSPATH="${CLASSPATH}:${HBASE_HOME}/target"
-fi
-for f in $HBASE_HOME/hbase*.jar; do
- if [[ $f = *sources.jar ]]
- then
- : # Skip sources.jar
- elif [ -f $f ]
- then
- CLASSPATH=${CLASSPATH}:$f;
- fi
-done
-
-# Add libs to CLASSPATH
-for f in $HBASE_HOME/lib/*.jar; do
- CLASSPATH=${CLASSPATH}:$f;
-done
-
-# Add user-specified CLASSPATH last
-if [ "$HBASE_CLASSPATH" != "" ]; then
- CLASSPATH=${CLASSPATH}:${HBASE_CLASSPATH}
-fi
-
-# default log directory & file
-if [ "$HBASE_LOG_DIR" = "" ]; then
- HBASE_LOG_DIR="$HBASE_HOME/logs"
-fi
-if [ "$HBASE_LOGFILE" = "" ]; then
- HBASE_LOGFILE='hbase.log'
-fi
-
-# cygwin path translation
-if $cygwin; then
- CLASSPATH=`cygpath -p -w "$CLASSPATH"`
- HBASE_HOME=`cygpath -d "$HBASE_HOME"`
- HBASE_LOG_DIR=`cygpath -d "$HBASE_LOG_DIR"`
-fi
-
-function append_path() {
- if [ -z "$1" ]; then
- echo $2
- else
- echo $1:$2
- fi
-}
-
-JAVA_PLATFORM=""
-
-#If avail, add Hadoop to the CLASSPATH and to the JAVA_LIBRARY_PATH
-if [ ! -z $HADOOP_HOME ]; then
- HADOOPCPPATH=""
- if [ -z $HADOOP_CONF_DIR ]; then
- HADOOPCPPATH=$(append_path "${HADOOPCPPATH}" "${HADOOP_HOME}/conf")
- else
- HADOOPCPPATH=$(append_path "${HADOOPCPPATH}" "${HADOOP_CONF_DIR}")
- fi
- HADOOPCPPATH=$(append_path "${HADOOPCPPATH}" `ls ${HADOOP_HOME}/hadoop-core*.jar | head -1`)
- for i in "${HADOOP_HOME}/lib/"*.jar; do
- HADOOPCPPATH="${HADOOPCPPATH}:$i"
- done
- CLASSPATH=$(append_path "${HADOOPCPPATH}" "${CLASSPATH}")
-
- if [ -d "${HADOOP_HOME}/lib/native" ]; then
- JAVA_PLATFORM=`CLASSPATH=${HADOOPCPPATH} ${JAVA} org.apache.hadoop.util.PlatformName | sed -e "s/ /_/g"`
- if [ -d "${HADOOP_HOME}/lib/native/${JAVA_PLATFORM}" ]; then
- JAVA_LIBRARY_PATH=$(append_path "${JAVA_LIBRARY_PATH}" "${HADOOP_HOME}/lib/native/${JAVA_PLATFORM}")
- fi
- fi
-fi
-
-if [ -d "${HBASE_HOME}/build/native" -o -d "${HBASE_HOME}/lib/native" ]; then
- if [ -z $JAVA_PLATFORM ]; then
- JAVA_PLATFORM=`CLASSPATH=${CLASSPATH} ${JAVA} org.apache.hadoop.util.PlatformName | sed -e "s/ /_/g"`
- fi
- if [ -d "$HBASE_HOME/build/native" ]; then
- JAVA_LIBRARY_PATH=$(append_path "$JAVA_LIBRARY_PATH" ${HBASE_HOME}/build/native/${JAVA_PLATFORM}/lib)
- fi
-
- if [ -d "${HBASE_HOME}/lib/native" ]; then
- JAVA_LIBRARY_PATH=$(append_path "$JAVA_LIBRARY_PATH" ${HBASE_HOME}/lib/native/${JAVA_PLATFORM})
- fi
-fi
-
-# cygwin path translation
-if $cygwin; then
- JAVA_LIBRARY_PATH=`cygpath -p "$JAVA_LIBRARY_PATH"`
-fi
-
-# restore ordinary behaviour
-unset IFS
-
-# figure out which class to run
-if [ "$COMMAND" = "shell" ] ; then
- # eg export JRUBY_HOME=/usr/local/share/jruby
- if [ "$JRUBY_HOME" != "" ] ; then
- CLASSPATH="$JRUBY_HOME/lib/jruby.jar:$CLASSPATH"
- HBASE_OPTS="$HBASE_OPTS -Djruby.home=$JRUBY_HOME -Djruby.lib=$JRUBY_HOME/lib"
- fi
- CLASS="org.jruby.Main ${JRUBY_OPTS} ${HBASE_HOME}/bin/hirb.rb"
-elif [ "$COMMAND" = "master" ] ; then
- CLASS='org.apache.hadoop.hbase.master.HMaster'
- if [ "$1" != "stop" ] ; then
- HBASE_OPTS="$HBASE_OPTS $HBASE_MASTER_OPTS"
- fi
-elif [ "$COMMAND" = "regionserver" ] ; then
- CLASS='org.apache.hadoop.hbase.regionserver.HRegionServer'
- if [ "$1" != "stop" ] ; then
- HBASE_OPTS="$HBASE_OPTS $HBASE_REGIONSERVER_OPTS"
- fi
-elif [ "$COMMAND" = "thrift" ] ; then
- CLASS='org.apache.hadoop.hbase.thrift.ThriftServer'
- if [ "$1" != "stop" ] ; then
- HBASE_OPTS="$HBASE_OPTS $HBASE_THRIFT_OPTS"
- fi
-elif [ "$COMMAND" = "rest" ] ; then
- CLASS='org.apache.hadoop.hbase.rest.Main'
- if [ "$1" != "stop" ] ; then
- HBASE_OPTS="$HBASE_OPTS $HBASE_REST_OPTS"
- fi
-elif [ "$COMMAND" = "avro" ] ; then
- CLASS='org.apache.hadoop.hbase.avro.AvroServer'
- if [ "$1" != "stop" ] ; then
- HBASE_OPTS="$HBASE_OPTS $HBASE_AVRO_OPTS"
- fi
-elif [ "$COMMAND" = "migrate" ] ; then
- CLASS='org.apache.hadoop.hbase.util.Migrate'
-elif [ "$COMMAND" = "hbck" ] ; then
- CLASS='org.apache.hadoop.hbase.util.HBaseFsck'
-elif [ "$COMMAND" = "zookeeper" ] ; then
- CLASS='org.apache.hadoop.hbase.zookeeper.HQuorumPeer'
- if [ "$1" != "stop" ] ; then
- HBASE_OPTS="$HBASE_OPTS $HBASE_ZOOKEEPER_OPTS"
- fi
-elif [ "$COMMAND" = "zkcli" ] ; then
- # ZooKeeperMainServerArg returns '-server HOST:PORT' or empty string.
- SERVER_ARG=`"$bin"/hbase org.apache.hadoop.hbase.zookeeper.ZooKeeperMainServerArg`
- CLASS="org.apache.zookeeper.ZooKeeperMain ${SERVER_ARG}"
-elif [ "$COMMAND" = "classpath" ] ; then
- echo $CLASSPATH
- exit 0
-else
- CLASS=$COMMAND
-fi
-
-# Have the JVM dump heap if we run out of memory. Files will be written to the
-# launch directory and are named like the following: java_pid21612.hprof. Apparently
-# it doesn't 'cost' to have this flag enabled. It's a 1.6 flag only. See:
-# http://blogs.sun.com/alanb/entry/outofmemoryerror_looks_a_bit_better
-HBASE_OPTS="$HBASE_OPTS -Dhbase.log.dir=$HBASE_LOG_DIR"
-HBASE_OPTS="$HBASE_OPTS -Dhbase.log.file=$HBASE_LOGFILE"
-HBASE_OPTS="$HBASE_OPTS -Dhbase.home.dir=$HBASE_HOME"
-HBASE_OPTS="$HBASE_OPTS -Dhbase.id.str=$HBASE_IDENT_STRING"
-HBASE_OPTS="$HBASE_OPTS -Dhbase.root.logger=${HBASE_ROOT_LOGGER:-INFO,console}"
-if [ "x$JAVA_LIBRARY_PATH" != "x" ]; then
- HBASE_OPTS="$HBASE_OPTS -Djava.library.path=$JAVA_LIBRARY_PATH"
-fi
-
-# Exec unless HBASE_NOEXEC is set.
-if [ "${HBASE_NOEXEC}" != "" ]; then
- "$JAVA" $JAVA_HEAP_MAX $HBASE_OPTS -classpath "$CLASSPATH" $CLASS "$@"
-else
- exec "$JAVA" $JAVA_HEAP_MAX $HBASE_OPTS -classpath "$CLASSPATH" $CLASS "$@"
-fi
diff --git a/lc-gdn-chef/cookbooks/hbase_cluster/templates/default/dashpot-hbase.html.erb b/lc-gdn-chef/cookbooks/hbase_cluster/templates/default/dashpot-hbase.html.erb
deleted file mode 100644
index a73ce3dd96d4f342f6aa5c8d653586086af0c0b5..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/hbase_cluster/templates/default/dashpot-hbase.html.erb
+++ /dev/null
@@ -1,25 +0,0 @@
-<html>
-  <head>
-    <title><%= node.name %> HBase Cluster</title>
-  </head>
-  <body>
-
-
-    <h1>HBase Cluster for <%= node.name %></h1>
-
-    <h2>Service Dashboards:</h2>
-    <ul>
-      <%- %w[ master regionserver stargate ].each do |component| %>
-      <%- next unless node[:dashpot][:links]["hbase.#{component}"] %>
-      <li><a href="<%= node[:dashpot][:links]["hbase.#{component}"] %>">hbase-<%= component %></a></li>
-      <%- end %>
-    </ul>
-
-
-    <h2>Interesting facts about my hbase installation:</h2>
-
-    <table>
-      <%= summary_rows(node, @summary_keys) %>
-    </table>
-  </body>
-</html>
diff --git a/lc-gdn-chef/cookbooks/hbase_cluster/templates/default/etc_default_hbase.erb b/lc-gdn-chef/cookbooks/hbase_cluster/templates/default/etc_default_hbase.erb
deleted file mode 100644
index 3e8aafc8be7c01de79411805d6928b5ac79dc0f6..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/hbase_cluster/templates/default/etc_default_hbase.erb
+++ /dev/null
@@ -1,13 +0,0 @@
-# Written by chef (hbase/recipes/config)
-
-#
-# Environment Variables
-#
-
-export HBASE_HOME=<%= @hbase[:home_dir] %>
-export HBASE_CONF_DIR=<%= @hbase[:conf_dir] %>
-export HBASE_LOG_DIR=<%= @hbase[:log_dir] %>
-export HBASE_PID_DIR=<%= @hbase[:pid_dir] %>
-
-export HBASE_IDENT_STRING=hbase
-export HBASE_NODENAME="<%= node.name.gsub(/[^\w\-]+/, '') %>"
diff --git a/lc-gdn-chef/cookbooks/hbase_cluster/templates/default/export_hbase_tables.rb.erb b/lc-gdn-chef/cookbooks/hbase_cluster/templates/default/export_hbase_tables.rb.erb
deleted file mode 100755
index 06c730c55965b0670ed36b2cfdcf94ae5b546399..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/hbase_cluster/templates/default/export_hbase_tables.rb.erb
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/usr/bin/env ruby
-
-tables_to_backup = <%= @hbase[:weekly_backup_tables].to_a.inspect %>
-
-date = Time.now.strftime("+%Y%m%d")
-
-export_dir = "<%= @hbase[:backup_location] %>/#{date}"
-hadoop_classpath = "HADOOP_CLASSPATH=/usr/lib/hbase/hbase.jar:/usr/lib/zookeeper/zookeeper.jar:/usr/lib/hbase/lib/guava-r05.jar:/usr/lib/hbase/conf"
-
-tables_to_backup.each do |table|
- system("sudo -u hbase #{hadoop_classpath} hadoop jar <%= @hbase[:home_dir] %>/hbase.jar export #{table} #{export_dir}/#{table} &")
-end
diff --git a/lc-gdn-chef/cookbooks/hbase_cluster/templates/default/hadoop-metrics.properties.erb b/lc-gdn-chef/cookbooks/hbase_cluster/templates/default/hadoop-metrics.properties.erb
deleted file mode 100644
index d5f68ed89735e97f2258124129ac7a1c4f2297bf..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/hbase_cluster/templates/default/hadoop-metrics.properties.erb
+++ /dev/null
@@ -1,61 +0,0 @@
-# See http://wiki.apache.org/hadoop/GangliaMetrics
-# Make sure you know whether you are using ganglia 3.0 or 3.1.
-# If 3.1, you will have to patch your hadoop instance with HADOOP-4675
-# And, yes, this file is named hadoop-metrics.properties rather than
-# hbase-metrics.properties because we're leveraging the hadoop metrics
-# package and hadoop-metrics.properties is a hardcoded name, at least
-# for the moment.
-#
-# See also http://hadoop.apache.org/hbase/docs/current/metrics.html
-
-
-<% unless @ganglia %>
-# Configuration of the "hbase" context for null
-hbase.class=org.apache.hadoop.metrics.spi.NullContext
-# Configuration of the "jvm" context for null
-jvm.class=org.apache.hadoop.metrics.spi.NullContext
-# Configuration of the "rpc" context for null
-rpc.class=org.apache.hadoop.metrics.spi.NullContext
-<% end %>
-
-# Configuration of the "hbase" context for file
-# hbase.class=org.apache.hadoop.hbase.metrics.file.TimeStampingFileContext
-# hbase.period=10
-# hbase.fileName=/tmp/metrics_hbase.log
-
-# Configuration of the "hbase" context for ganglia
-# Pick one: Ganglia 3.0 (former) or Ganglia 3.1 (latter)
-#hbase.class=org.apache.hadoop.metrics.ganglia.GangliaContext
-<% if @ganglia %>
-hbase.class=org.apache.hadoop.metrics.ganglia.GangliaContext31
-hbase.period=<%= @period %>
-hbase.servers=<%= @ganglia_addr %>:<%= @ganglia_port %>
-<% end %>
-
-# Configuration of the "jvm" context for file
-# jvm.class=org.apache.hadoop.hbase.metrics.file.TimeStampingFileContext
-# jvm.period=10
-# jvm.fileName=/tmp/metrics_jvm.log
-
-# Configuration of the "jvm" context for ganglia
-# Pick one: Ganglia 3.0 (former) or Ganglia 3.1 (latter)
-# jvm.class=org.apache.hadoop.metrics.ganglia.GangliaContext
-<% if @ganglia %>
-jvm.class=org.apache.hadoop.metrics.ganglia.GangliaContext31
-jvm.period=<%= @period %>
-jvm.servers=<%= @ganglia_addr %>:<%= @ganglia_port %>
-<% end %>
-
-# Configuration of the "rpc" context for file
-# rpc.class=org.apache.hadoop.hbase.metrics.file.TimeStampingFileContext
-# rpc.period=10
-# rpc.fileName=/tmp/metrics_rpc.log
-
-# Configuration of the "rpc" context for ganglia
-# Pick one: Ganglia 3.0 (former) or Ganglia 3.1 (latter)
-# rpc.class=org.apache.hadoop.metrics.ganglia.GangliaContext
-<% if @ganglia %>
-rpc.class=org.apache.hadoop.metrics.ganglia.GangliaContext31
-rpc.period=<%= @period %>
-rpc.servers=<%= @ganglia_addr %>:<%= @ganglia_port %>
-<% end %>
diff --git a/lc-gdn-chef/cookbooks/hbase_cluster/templates/default/hbase-env.sh.erb b/lc-gdn-chef/cookbooks/hbase_cluster/templates/default/hbase-env.sh.erb
deleted file mode 100755
index 4b4cb06ec2ba384ae635018fa34d9a1ce0b53d40..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/hbase_cluster/templates/default/hbase-env.sh.erb
+++ /dev/null
@@ -1,90 +0,0 @@
-#!/usr/bin/env bash
-#/**
-# * Copyright 2007 The Apache Software Foundation
-# *
-# * Licensed to the Apache Software Foundation (ASF) under one
-# * or more contributor license agreements. See the NOTICE file
-# * distributed with this work for additional information
-# * regarding copyright ownership. The ASF licenses this file
-# * to you under the Apache License, Version 2.0 (the
-# * "License"); you may not use this file except in compliance
-# * with the License. You may obtain a copy of the License at
-# *
-# * http://www.apache.org/licenses/LICENSE-2.0
-# *
-# * Unless required by applicable law or agreed to in writing, software
-# * distributed under the License is distributed on an "AS IS" BASIS,
-# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# * See the License for the specific language governing permissions and
-# * limitations under the License.
-# */
-
-#
-# Set hbase-specific environment variables here.
-#
-# Created by Chef -- changes will be overwritten
-#
-
-# a fence, because bin/hadoop sources this like four times
-this_file="`readlink -f $BASH_SOURCE`"
-if [ "$HBASE_ENV_SOURCED" == "" ] || [ "$HBASE_ENV_SOURCED" != "$this_file" ] ; then
-
-# The java implementation to use. Java 1.6 required.
-# export JAVA_HOME=/usr/java/jdk1.6.0/
-
-# Extra Java CLASSPATH elements. Optional.
-# export HBASE_CLASSPATH=
-
-# The maximum amount of heap to use, in MB. Default is 1000.
-# export HBASE_HEAPSIZE=1000
-
-# Extra Java runtime options.
-# Below are what we set by default. May only work with SUN JVM.
-# For more on why as well as other possible settings,
-# see http://wiki.apache.org/hadoop/PerformanceTuning
-export HBASE_OPTS="$HBASE_OPTS -ea -XX:+HeapDumpOnOutOfMemoryError -Djava.net.preferIPv4Stack=true"
-
-# JVM options
-export HBASE_MASTER_OPTS="$HBASE_MASTER_OPTS -Xms<%= @hbase[:master ][:java_heap_size_max] %> -Xmx<%= @hbase[:master ][:java_heap_size_max] %> -XX:NewSize=<%= @hbase[:master ][:java_heap_size_new] %> -XX:MaxNewSize=<%= @hbase[:master ][:java_heap_size_new] %>"
-export HBASE_REGIONSERVER_OPTS="$HBASE_REGIONSERVER_OPTS -Xms<%= @hbase[:regionserver][:java_heap_size_max] %> -Xmx<%= @hbase[:regionserver][:java_heap_size_max] %> -XX:NewSize=<%= @hbase[:regionserver][:java_heap_size_new] %> -XX:MaxNewSize=<%= @hbase[:regionserver][:java_heap_size_new] %>"
-
-# GC options
-export HBASE_MASTER_OPTS="$HBASE_MASTER_OPTS <%= @hbase[:master ][:gc_tuning_opts] %> <%= @hbase[:master ][:gc_log_opts] %> -Xloggc:/var/log/hbase/hbase-master-gc.log"
-export HBASE_REGIONSERVER_OPTS="$HBASE_REGIONSERVER_OPTS <%= @hbase[:regionserver][:gc_tuning_opts] %> <%= @hbase[:regionserver][:gc_log_opts] %> -Xloggc:/var/log/hbase/hbase-regionserver-gc.log"
-
-# Enable JMX exporting. See jmxremote.password and jmxremote.access in
-# $JRE_HOME/lib/management to configure remote password access. More details
-# at: http://java.sun.com/javase/6/docs/technotes/guides/management/agent.html
-#
-# You should set the java.rmi.server.hostname to the *public* hostname if you
-# want remote access; you'll need to use security groups to open up the high end
-# of all ports (not just this one).
-#
-export HBASE_JMX_BASE="-Djava.rmi.server.hostname=<%= @hbase[:jmx_dash_addr] %> -Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.authenticate=false"
-
-# JMX port
-export HBASE_MASTER_OPTS="$HBASE_MASTER_OPTS $HBASE_JMX_BASE -Dcom.sun.management.jmxremote.port=<%= @hbase[:master ][:jmx_dash_port] %>"
-export HBASE_REGIONSERVER_OPTS="$HBASE_REGIONSERVER_OPTS $HBASE_JMX_BASE -Dcom.sun.management.jmxremote.port=<%= @hbase[:regionserver][:jmx_dash_port] %>"
-export HBASE_THRIFT_OPTS="$HBASE_THRIFT_OPTS $HBASE_JMX_BASE -Dcom.sun.management.jmxremote.port=<%= @hbase[:zookeeper ][:jmx_dash_port] %>"
-export HBASE_ZOOKEEPER_OPTS="$HBASE_ZOOKEEPER_OPTS $HBASE_JMX_BASE -Dcom.sun.management.jmxremote.port=<%= @hbase[:thrift ][:jmx_dash_port] %>"
-export HBASE_REST_OPTS="$HBASE_REST_OPTS $HBASE_JMX_BASE -Dcom.sun.management.jmxremote.port=<%= @hbase[:stargate ][:jmx_dash_port] %>"
-
-# File naming hosts on which HRegionServers will run. $HBASE_HOME/conf/regionservers by default.
-# export HBASE_REGIONSERVERS=${HBASE_HOME}/conf/regionservers
-
-# Extra ssh options. Empty by default.
-# export HBASE_SSH_OPTS="-o ConnectTimeout=1 -o SendEnv=HBASE_CONF_DIR -o StrictHostKeyChecking=no"
-
-# The scheduling priority for daemon processes. See 'man nice'.
-# export HBASE_NICENESS=10
-
-# Seconds to sleep between slave commands. Unset by default. This
-# can be useful in large clusters, where, e.g., slave rsyncs can
-# otherwise arrive faster than the master can service them.
-# export HBASE_SLAVE_SLEEP=0.1
-
-export HBASE_NODENAME="<%= node.name.gsub(/[^\w\-]+/, '') %>"
-
-# end of double-run fence
-export HBASE_ENV_SOURCED="$this_file"
-fi
diff --git a/lc-gdn-chef/cookbooks/hbase_cluster/templates/default/hbase-jruby_home-and-jruby_opts.patch b/lc-gdn-chef/cookbooks/hbase_cluster/templates/default/hbase-jruby_home-and-jruby_opts.patch
deleted file mode 100644
index 348f9cb4dd94ccafaaee0b7509324f03e69164dc..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/hbase_cluster/templates/default/hbase-jruby_home-and-jruby_opts.patch
+++ /dev/null
@@ -1,29 +0,0 @@
---- bin/hbase 2012-01-21 23:26:30.228765999 +0000
-+++ bin/hbase-new 2012-01-21 23:40:55.486375684 +0000
-@@ -42,6 +42,12 @@
- #
- # MAVEN_HOME Where mvn is installed.
- #
-+# JRUBY_HOME JRuby path: $JRUBY_HOME/lib/jruby.jar should exist.
-+# Defaults to the jar packaged with HBase.
-+#
-+# JRUBY_OPTS Extra options (eg '--1.9') passed to the hbase shell.
-+# Empty by default.
-+#
- bin=`dirname "$0"`
- bin=`cd "$bin">/dev/null; pwd`
-
-@@ -237,7 +243,12 @@
-
- # figure out which class to run
- if [ "$COMMAND" = "shell" ] ; then
-- CLASS="org.jruby.Main ${HBASE_HOME}/bin/hirb.rb"
-+ # eg export JRUBY_HOME=/usr/local/share/jruby
-+ if [ "$JRUBY_HOME" != "" ] ; then
-+ CLASSPATH="$JRUBY_HOME/lib/jruby.jar:$CLASSPATH"
-+ HBASE_OPTS="$HBASE_OPTS -Djruby.home=$JRUBY_HOME -Djruby.lib=$JRUBY_HOME/lib"
-+ fi
-+ CLASS="org.jruby.Main ${JRUBY_OPTS} ${HBASE_HOME}/bin/hirb.rb"
- elif [ "$COMMAND" = "master" ] ; then
- CLASS='org.apache.hadoop.hbase.master.HMaster'
- if [ "$1" != "stop" ] ; then
diff --git a/lc-gdn-chef/cookbooks/hbase_cluster/templates/default/hbase-site.xml.erb b/lc-gdn-chef/cookbooks/hbase_cluster/templates/default/hbase-site.xml.erb
deleted file mode 100644
index 818189f22e06a274e29e02e9bcc3019727774bb4..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/hbase_cluster/templates/default/hbase-site.xml.erb
+++ /dev/null
@@ -1,95 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-
-<configuration>
-
-  <property><name>hbase.rootdir</name><value>hdfs://<%= @namenode_fqdn %>:8020/hadoop/hbase</value></property>
-  <property><name>hbase.tmp.dir</name><value><%= node[:hbase][:tmp_dir] %>/hbase-${user.name}</value></property>
-  <property><name>hbase.cluster.distributed</name><value>true</value></property>
-  <property><name>hbase.zookeeper.quorum</name><value><%= @zookeeper_addrs.join(",") %></value></property>
-  <property><name>hbase.rest.readonly</name><value><%= @hbase[:stargate ][:readonly] %></value></property>
-
-  <property><name>hbase.master.port</name><value><%= @hbase[:master ][:port] %></value></property>
-  <property><name>hbase.master.info.port</name><value><%= @hbase[:master ][:dash_port] %></value></property>
-  <property><name>hbase.regionserver.port</name><value><%= @hbase[:regionserver][:port] %></value></property>
-  <property><name>hbase.regionserver.info.port</name><value><%= @hbase[:regionserver][:dash_port] %></value></property>
-  <property><name>hbase.rest.port</name><value><%= @hbase[:stargate ][:port] %></value></property>
-  <property><name>hbase.zookeeper.peerport</name><value><%= @hbase[:zookeeper ][:peer_port] %></value></property>
-  <property><name>hbase.zookeeper.leaderport</name><value><%= @hbase[:zookeeper ][:leader_port] %></value></property>
-  <property><name>hbase.zookeeper.property.clientPort</name><value><%= @hbase[:zookeeper ][:client_port] %></value></property>
-
-  <property><name>hbase.client.write.buffer</name><value><%= @hbase[:client ][:write_buffer] %></value></property>
-  <property><name>hbase.client.pause</name><value><%= @hbase[:client ][:pause_period_ms] %></value></property>
-  <property><name>hbase.client.retries.number</name><value><%= @hbase[:client ][:retry_count] %></value></property>
-  <property><name>hbase.client.scanner.caching</name><value><%= @hbase[:client ][:scanner_prefetch_rows] %></value></property>
-  <property><name>hbase.client.keyvalue.maxsize</name><value><%= @hbase[:client ][:max_keyvalue_size] %></value></property>
-
-  <property><name>hbase.regionserver.lease.period</name><value><%= @hbase[:regionserver][:lease_period] %></value></property>
-  <property><name>hbase.regionserver.handler.count</name><value><%= @hbase[:regionserver][:handler_count] %></value></property>
-  <property><name>hbase.regionserver.regionSplitLimit</name><value><%= @hbase[:regionserver][:split_limit] %></value></property>
-  <property><name>hbase.regionserver.msginterval</name><value><%= @hbase[:regionserver][:msg_period] %></value></property>
-  <property><name>hbase.regionserver.optionallogflushinterval</name><value><%= @hbase[:regionserver][:log_flush_period] %></value></property>
-  <property><name>hbase.regionserver.logroll.period</name><value><%= @hbase[:regionserver][:logroll_period] %></value></property>
-  <property><name>hbase.regionserver.thread.splitcompactcheckfrequency</name><value><%= @hbase[:regionserver][:split_check_period] %></value></property>
-  <property><name>hbase.server.thread.wakefrequency</name><value><%= @hbase[:regionserver][:worker_period] %></value></property>
-  <property><name>hbase.balancer.period</name><value><%= @hbase[:regionserver][:balancer_period] %></value></property>
-  <property><name>hbase.regions.slop</name><value><%= @hbase[:regionserver][:balancer_slop] %></value></property>
-
-  <property><name>hbase.regionserver.global.memstore.upperLimit</name><value><%= @hbase[:memstore ][:flush_upper_heap_pct] %></value></property>
-  <property><name>hbase.regionserver.global.memstore.lowerLimit</name><value><%= @hbase[:memstore ][:flush_lower_heap_pct] %></value></property>
-  <property><name>hbase.hregion.memstore.flush.size</name><value><%= @hbase[:memstore ][:flush_size_trigger] %></value></property>
-  <property><name>hbase.hregion.preclose.flush.size</name><value><%= @hbase[:memstore ][:preflush_trigger] %></value></property>
-  <property><name>hbase.hregion.memstore.block.multiplier</name><value><%= @hbase[:memstore ][:flush_stall_trigger] %></value></property>
-  <property><name>hbase.hregion.memstore.mslab.enabled</name><value><%= @hbase[:memstore ][:mslab_enabled] %></value></property>
-  <property><name>hbase.hregion.max.filesize</name><value><%= @hbase[:regionserver][:max_filesize] %></value></property>
-
-  <property><name>hbase.hstore.compactionThreshold</name><value><%= @hbase[:compaction ][:files_trigger] %></value></property>
-  <property><name>hbase.hstore.blockingStoreFiles</name><value><%= @hbase[:compaction ][:pause_trigger] %></value></property>
-  <property><name>hbase.hstore.blockingWaitTime</name><value><%= @hbase[:compaction ][:pause_time] %></value></property>
-  <property><name>hbase.hstore.compaction.max</name><value><%= @hbase[:compaction ][:max_combine_files] %></value></property>
-  <property><name>hbase.hregion.majorcompaction</name><value><%= @hbase[:compaction ][:period] %></value></property>
-
-  <property><name>hbase.mapreduce.hfileoutputformat.blocksize</name><value><%= @hbase[:regionserver][:hfile_block_size] %></value></property>
-  <property><name>hfile.block.cache.size</name><value><%= @hbase[:regionserver][:block_cache_size] %></value></property>
-<%- if @hbase[:regionserver][:required_codecs].to_s != "" %>
-  <property><name>hbase.regionserver.codecs</name><value><%= @hbase[:regionserver][:required_codecs] %></value></property>
-<% end %>
-  <property><name>hbase.hash.type</name><value><%= @hbase[:regionserver][:hash_type] %></value></property>
-
-  <property><name>zookeeper.session.timeout</name><value><%= @hbase[:zookeeper ][:session_timeout] %></value></property>
-  <property><name>zookeeper.znode.parent</name><value><%= @hbase[:zookeeper ][:znode_parent] %></value></property>
-  <property><name>zookeeper.znode.rootserver</name><value><%= @hbase[:zookeeper ][:znode_rootserver] %></value></property>
-  <property><name>hbase.zookeeper.property.maxClientCnxns</name><value><%= @hbase[:zookeeper ][:max_client_connections] %></value></property>
-
-</configuration>
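The recipe that rendered this template is not part of this diff; as a hedged sketch only, a Chef `template` resource along the following lines would supply the `@namenode_fqdn`, `@zookeeper_addrs` and `@hbase` variables the ERB references. The attribute paths, destination and notified service shown here are assumptions, not taken from this repository.

```ruby
# Hypothetical sketch: rendering hbase-site.xml.erb from an hbase_cluster recipe.
template '/etc/hbase/conf/hbase-site.xml' do
  source   'hbase-site.xml.erb'
  cookbook 'hbase_cluster'
  owner    'hbase'
  group    'hbase'
  mode     '0644'
  variables(
    namenode_fqdn:   node[:hadoop][:namenode_fqdn],     # assumed attribute
    zookeeper_addrs: node[:zookeeper][:cluster_addrs],  # assumed attribute
    hbase:           node[:hbase]
  )
  notifies :restart, 'runit_service[hbase_regionserver]', :delayed # assumed service name
end
```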
diff --git a/lc-gdn-chef/cookbooks/hbase_cluster/templates/default/sv-hbase_master-log-run.erb b/lc-gdn-chef/cookbooks/hbase_cluster/templates/default/sv-hbase_master-log-run.erb
deleted file mode 100755
index e3ae61924beaca77de4e2ea48f4110858487b22e..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/hbase_cluster/templates/default/sv-hbase_master-log-run.erb
+++ /dev/null
@@ -1,2 +0,0 @@
-#!/bin/sh
-exec svlogd -tt <%= @options[:log_dir] %>/<%= @options[:service_name] %>
diff --git a/lc-gdn-chef/cookbooks/hbase_cluster/templates/default/sv-hbase_master-run.erb b/lc-gdn-chef/cookbooks/hbase_cluster/templates/default/sv-hbase_master-run.erb
deleted file mode 100755
index 45b9f61a84ee5b2f698c66563764a89905c98ecd..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/hbase_cluster/templates/default/sv-hbase_master-run.erb
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/bin/bash
-exec 2>&1
-
-command=<%= @options[:command_name] || @options[:service_name] %>
-
-# Include hbase defaults if available
-if [ -f "/etc/default/hbase" ] ; then
- . "/etc/default/hbase"
-fi
-
-if [ -f "<%= @options[:home_dir] %>/bin/hbase-config.sh" ] ; then
- . "<%= @options[:home_dir] %>/bin/hbase-config.sh"
-fi
-
-if [ -f "<%= @options[:conf_dir] %>/hbase-env.sh" ] ; then
- . "<%= @options[:conf_dir] %>/hbase-env.sh"
-fi
-
-# some variables normally set by bin/hbase-daemon.sh
-export HBASE_LOGFILE=hbase-$HBASE_IDENT_STRING-$command-$HBASE_NODENAME.log
-export HBASE_ROOT_LOGGER="INFO,DRFA"
-
-cd "$HBASE_HOME"
-exec chpst -u <%= @options[:user] %> "$HBASE_HOME"/bin/hbase $command start "$@" < /dev/null
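These `sv-*-run.erb` and `sv-*-log-run.erb` templates follow the naming convention of the runit cookbook's `runit_service` resource, which renders them with the `@options` hash they reference. A minimal sketch of how one of these services could have been declared follows; the actual recipe is not in this diff, so the option values are assumptions (only the `@options` keys used above are taken from the source).

```ruby
# Hypothetical sketch: wiring sv-hbase_master-run.erb into runit.
runit_service 'hbase_master' do
  cookbook 'hbase_cluster'
  options(
    service_name: 'hbase_master',
    command_name: 'master',          # becomes $command in the run script
    home_dir:     '/usr/lib/hbase',
    conf_dir:     '/etc/hbase/conf',
    log_dir:      '/var/log/hbase',
    user:         'hbase'
  )
  action [:enable, :start]
end
```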
diff --git a/lc-gdn-chef/cookbooks/hbase_cluster/templates/default/sv-hbase_regionserver-log-run.erb b/lc-gdn-chef/cookbooks/hbase_cluster/templates/default/sv-hbase_regionserver-log-run.erb
deleted file mode 100755
index e3ae61924beaca77de4e2ea48f4110858487b22e..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/hbase_cluster/templates/default/sv-hbase_regionserver-log-run.erb
+++ /dev/null
@@ -1,2 +0,0 @@
-#!/bin/sh
-exec svlogd -tt <%= @options[:log_dir] %>/<%= @options[:service_name] %>
diff --git a/lc-gdn-chef/cookbooks/hbase_cluster/templates/default/sv-hbase_regionserver-run.erb b/lc-gdn-chef/cookbooks/hbase_cluster/templates/default/sv-hbase_regionserver-run.erb
deleted file mode 100755
index 45b9f61a84ee5b2f698c66563764a89905c98ecd..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/hbase_cluster/templates/default/sv-hbase_regionserver-run.erb
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/bin/bash
-exec 2>&1
-
-command=<%= @options[:command_name] || @options[:service_name] %>
-
-# Include hbase defaults if available
-if [ -f "/etc/default/hbase" ] ; then
- . "/etc/default/hbase"
-fi
-
-if [ -f "<%= @options[:home_dir] %>/bin/hbase-config.sh" ] ; then
- . "<%= @options[:home_dir] %>/bin/hbase-config.sh"
-fi
-
-if [ -f "<%= @options[:conf_dir] %>/hbase-env.sh" ] ; then
- . "<%= @options[:conf_dir] %>/hbase-env.sh"
-fi
-
-# some variables normally set by bin/hbase-daemon.sh
-export HBASE_LOGFILE=hbase-$HBASE_IDENT_STRING-$command-$HBASE_NODENAME.log
-export HBASE_ROOT_LOGGER="INFO,DRFA"
-
-cd "$HBASE_HOME"
-exec chpst -u <%= @options[:user] %> "$HBASE_HOME"/bin/hbase $command start "$@" < /dev/null
diff --git a/lc-gdn-chef/cookbooks/hbase_cluster/templates/default/sv-hbase_stargate-log-run.erb b/lc-gdn-chef/cookbooks/hbase_cluster/templates/default/sv-hbase_stargate-log-run.erb
deleted file mode 100755
index e3ae61924beaca77de4e2ea48f4110858487b22e..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/hbase_cluster/templates/default/sv-hbase_stargate-log-run.erb
+++ /dev/null
@@ -1,2 +0,0 @@
-#!/bin/sh
-exec svlogd -tt <%= @options[:log_dir] %>/<%= @options[:service_name] %>
diff --git a/lc-gdn-chef/cookbooks/hbase_cluster/templates/default/sv-hbase_stargate-run.erb b/lc-gdn-chef/cookbooks/hbase_cluster/templates/default/sv-hbase_stargate-run.erb
deleted file mode 100755
index 45b9f61a84ee5b2f698c66563764a89905c98ecd..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/hbase_cluster/templates/default/sv-hbase_stargate-run.erb
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/bin/bash
-exec 2>&1
-
-command=<%= @options[:command_name] || @options[:service_name] %>
-
-# Include hbase defaults if available
-if [ -f "/etc/default/hbase" ] ; then
- . "/etc/default/hbase"
-fi
-
-if [ -f "<%= @options[:home_dir] %>/bin/hbase-config.sh" ] ; then
- . "<%= @options[:home_dir] %>/bin/hbase-config.sh"
-fi
-
-if [ -f "<%= @options[:conf_dir] %>/hbase-env.sh" ] ; then
- . "<%= @options[:conf_dir] %>/hbase-env.sh"
-fi
-
-# some variables normally set by bin/hbase-daemon.sh
-export HBASE_LOGFILE=hbase-$HBASE_IDENT_STRING-$command-$HBASE_NODENAME.log
-export HBASE_ROOT_LOGGER="INFO,DRFA"
-
-cd "$HBASE_HOME"
-exec chpst -u <%= @options[:user] %> "$HBASE_HOME"/bin/hbase $command start "$@" < /dev/null
diff --git a/lc-gdn-chef/cookbooks/hbase_cluster/templates/default/sv-hbase_thrift-log-run.erb b/lc-gdn-chef/cookbooks/hbase_cluster/templates/default/sv-hbase_thrift-log-run.erb
deleted file mode 100755
index e3ae61924beaca77de4e2ea48f4110858487b22e..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/hbase_cluster/templates/default/sv-hbase_thrift-log-run.erb
+++ /dev/null
@@ -1,2 +0,0 @@
-#!/bin/sh
-exec svlogd -tt <%= @options[:log_dir] %>/<%= @options[:service_name] %>
diff --git a/lc-gdn-chef/cookbooks/hbase_cluster/templates/default/sv-hbase_thrift-run.erb b/lc-gdn-chef/cookbooks/hbase_cluster/templates/default/sv-hbase_thrift-run.erb
deleted file mode 100755
index 45b9f61a84ee5b2f698c66563764a89905c98ecd..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/hbase_cluster/templates/default/sv-hbase_thrift-run.erb
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/bin/bash
-exec 2>&1
-
-command=<%= @options[:command_name] || @options[:service_name] %>
-
-# Include hbase defaults if available
-if [ -f "/etc/default/hbase" ] ; then
- . "/etc/default/hbase"
-fi
-
-if [ -f "<%= @options[:home_dir] %>/bin/hbase-config.sh" ] ; then
- . "<%= @options[:home_dir] %>/bin/hbase-config.sh"
-fi
-
-if [ -f "<%= @options[:conf_dir] %>/hbase-env.sh" ] ; then
- . "<%= @options[:conf_dir] %>/hbase-env.sh"
-fi
-
-# some variables normally set by bin/hbase-daemon.sh
-export HBASE_LOGFILE=hbase-$HBASE_IDENT_STRING-$command-$HBASE_NODENAME.log
-export HBASE_ROOT_LOGGER="INFO,DRFA"
-
-cd "$HBASE_HOME"
-exec chpst -u <%= @options[:user] %> "$HBASE_HOME"/bin/hbase $command start "$@" < /dev/null
diff --git a/lc-gdn-chef/cookbooks/lc-gdn-mail/metadata.rb b/lc-gdn-chef/cookbooks/lc-gdn-mail/metadata.rb
deleted file mode 100644
index ea68b646aa2497c8bc82772d6e9338d76778cd38..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/lc-gdn-mail/metadata.rb
+++ /dev/null
@@ -1,4 +0,0 @@
-name 'lc-gdn-mail'
-depends 'postfix'
-depends 'acme'
-depends 'apache2'
\ No newline at end of file
diff --git a/lc-gdn-chef/cookbooks/lc-gdn-mail/recipes/mx0-relay.rb b/lc-gdn-chef/cookbooks/lc-gdn-mail/recipes/mx0-relay.rb
deleted file mode 100644
index 32c9f0fa17c80a445bb4824f93145a4e3d077dde..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/lc-gdn-mail/recipes/mx0-relay.rb
+++ /dev/null
@@ -1,69 +0,0 @@
-include_recipe 'acme'
-
-service 'apache2' do
- service_name lazy { apache_platform_service_name }
- supports restart: true, status: true, reload: true
- action :nothing
-end
-
-apache2_install 'default_install'
-apache2_module 'headers'
-apache2_module 'ssl'
-
-apache2_default_site node['name'] do
- default_site_name node['name']
- template_cookbook 'apache2'
- port '443'
- template_source 'default-site.conf.erb'
- action :enable
-end
-
-acme_selfsigned node['fqdn'] do
- crt "/etc/ssl/certs/postfix.crt"
- key "/etc/ssl/certs/postfix.key"
- chain "/etc/ssl/certs/postfix.pem"
-# owner "postfix"
-# group "postfix"
- notifies :restart, "service[apache2]", :immediate
-end
-
-# Disabled until we can switch based on public v. private nodes
-#acme_certificate node['fqdn'] do
-# crt '/etc/ssl/certs/postfix.crt'
-# key '/etc/ssl/certs/postfix.key'
-# owner "postfix"
-# group "postfix"
-# wwwroot '/var/www/html'
-# notifies :restart, "service[postfix]", :immediate
-#end
-
-node.default['postfix']['mail_type'] = 'master'
-node.default['postfix']['main']['mydomain'] = 'leigh-co.com'
-node.default['postfix']['main']['mynetworks'] = '71.197.230.0/24'
-node.default['postfix']['master']['submission']['active'] = true
-node.default['postfix']['master']['submission']['args'] = ["-o smtpd_recipient_restrictions=permit_mynetworks",
-"-o smtpd_relay_restrictions=permit_mynetworks,reject"]
-node.default['postfix']['use_transport_maps'] = true
-node.default['postfix']['use_relay_restrictions_maps'] = true
-node.default['postfix']['main']['smtp_use_tls'] = 'yes'
-node.default['postfix']['main']['smtpd_use_tls'] = 'yes'
-node.default['postfix']['main']['smtp_tls_key_file'] = '/etc/ssl/certs/postfix.key'
-node.default['postfix']['main']['smtpd_tls_key_file'] = '/etc/ssl/certs/postfix.key'
-node.default['postfix']['main']['smtp_tls_cert_file'] = '/etc/ssl/certs/postfix.crt'
-node.default['postfix']['main']['smtpd_tls_cert_file'] = '/etc/ssl/certs/postfix.crt'
-# These files were correct on EC2, but is it really true everywhere?
-node.default['postfix']['smtp_tls_CAfile'] = '/etc/ssl/certs/ca-bundle.crt'
-node.default['postfix']['smtpd_tls_CAfile'] = '/etc/ssl/certs/ca-bundle.crt'
-node.default['postfix']['smtp_tls_security_level'] = 'encrypt'
-node.default['postfix']['relay_restrictions'] = {
- "leigh-co.com" => "OK",
- "a6v.org" => "OK"
-}
-
-node.default['postfix']['maps']['hash']['/etc/postfix/transport'] = {
- "leigh-co.com" => "relay:[71.197.230.171]:587",
- "a6v.org" => "relay:[71.197.230.171]:587"
-}
-
-# Note that postfix user/group is not available until after this installs
-include_recipe 'postfix::server'
diff --git a/lc-gdn-chef/cookbooks/lc-gdn-minecraft/metadata.rb b/lc-gdn-chef/cookbooks/lc-gdn-minecraft/metadata.rb
deleted file mode 100644
index 9ef2d065301684f7442b32a98f8f08709908831e..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/lc-gdn-minecraft/metadata.rb
+++ /dev/null
@@ -1,2 +0,0 @@
-name 'lc-gdn-minecraft'
-depends 'haproxy'
diff --git a/lc-gdn-chef/cookbooks/lc-gdn-minecraft/recipes/neweden-relay.rb b/lc-gdn-chef/cookbooks/lc-gdn-minecraft/recipes/neweden-relay.rb
deleted file mode 100644
index acf3770c0dea5951a904ca9cd3bd17c43d3b2505..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/lc-gdn-minecraft/recipes/neweden-relay.rb
+++ /dev/null
@@ -1,39 +0,0 @@
-apt_update
-
-haproxy_install 'package' do
- sensitive false
-end
-
-haproxy_config_global '' do
- chroot '/var/lib/haproxy'
- daemon true
- maxconn 256
- log '/dev/log local0'
- log_tag 'WARDEN'
- pidfile '/var/run/haproxy.pid'
- stats socket: '/var/lib/haproxy/stats level admin'
- tuning 'bufsize' => '262144'
-end
-
-haproxy_config_defaults 'defaults' do
- mode 'http'
- timeout connect: '5000ms',
- client: '5000ms',
- server: '5000ms'
- haproxy_retries 5
-end
-
-haproxy_frontend 'tcp-in' do
- mode 'tcp'
- bind '*:25565'
- default_backend 'tcp-servers'
-end
-
-haproxy_backend 'tcp-servers' do
- mode 'tcp'
- server ['server2 71.197.230.171:25565 maxconn 32']
-end
-
-haproxy_service 'haproxy' do
- action %i(create enable start)
-end
diff --git a/lc-gdn-chef/cookbooks/lc/metadata.rb b/lc-gdn-chef/cookbooks/lc/metadata.rb
deleted file mode 100644
index c0db2a365c818a5224c633c53627e36337663cf7..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/lc/metadata.rb
+++ /dev/null
@@ -1 +0,0 @@
-name 'lc'
\ No newline at end of file
diff --git a/lc-gdn-chef/cookbooks/lc/recipes/hostname.rb b/lc-gdn-chef/cookbooks/lc/recipes/hostname.rb
deleted file mode 100644
index c57ccbed6566dabbb66b6856b9d5997f8da1954e..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/lc/recipes/hostname.rb
+++ /dev/null
@@ -1,3 +0,0 @@
-hostname node.name do
- action :set
-end
diff --git a/lc-gdn-chef/cookbooks/lc/recipes/ssh.rb b/lc-gdn-chef/cookbooks/lc/recipes/ssh.rb
deleted file mode 100644
index 4c0740981d4bb2d675e8c4dbaf13a1d94ca40130..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/lc/recipes/ssh.rb
+++ /dev/null
@@ -1,7 +0,0 @@
-package 'openssh' do
- action :upgrade
-end
-
-service 'sshd' do
- action [:enable, :start]
-end
\ No newline at end of file
diff --git a/lc-gdn-chef/cookbooks/postfix/CHANGELOG.md b/lc-gdn-chef/cookbooks/postfix/CHANGELOG.md
deleted file mode 100644
index 50c79d32498de1f0974e3ed6e2028cd11170c3c2..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/postfix/CHANGELOG.md
+++ /dev/null
@@ -1,313 +0,0 @@
-# postfix Cookbook CHANGELOG
-
-This file is used to list changes made in each version of the postfix cookbook.
-
-## 6.0.7 - *2022-02-03*
-
-Standardise files with files in sous-chefs/repo-management
-
-## 6.0.6 - *2022-02-02*
-
-- Update tested platforms
-- Remove delivery and move to calling RSpec directly via a reusable workflow
-
-## 6.0.5 - *2022-01-08*
-
-- resolved cookstyle error: test/integration/helpers/serverspec/spec_helper.rb:9:21 convention: `Style/FileRead`
-
-## 6.0.4 - *2021-08-19*
-
-## 6.0.3 - *2021-08-19*
-
-- Fixed TLS configuration
-
-## 6.0.2 - *2021-06-30*
-
-- Make sure we write the main.conf and master.conf before we try to use any commands (like postmap)
-
-## 6.0.1 - *2021-06-01*
-
-## 6.0.0 - *2020-11-23*
-
-- Disabled SSLv3 by default
-
-## 5.4.1 - 2020-10-20
-
-- Ensure all postmap files are rebuilt immediately if needed
-
-## 5.4.0 - 2020-10-11
-
-### Changed
-
-- Sous Chefs Adoption
-- Update to use Sous Chefs GH workflow
-- Update README to sous-chefs
-- Update metadata.rb to Sous Chefs
-- Update test-kitchen to Sous Chefs
-
-### Added
-
-- Standardise files with files in sous-chefs/repo-management
-- Add Ubuntu 20.04 testing
-
-### Fixed
-
-- Cookstyle fixes
-- ChefSpec fixes
-- Yamllint fixes
-- MDL fixes
-- Fix OpenSUSE installation issues
-
-### Removed
-
-- Remove EL 6 testing
-- Remove Amazon Linux 1 testing
-
-## 5.3.1 (2018-07-24)
-
-- Fixed sbin issue with Chef13
-
-## 5.3.0 (2018-05-23)
-
-- support multiple sasl_passwd entries
-- Add `packages` attribute so different postfix packages can be installed
-- add ability to set network connection port for a remote relayhost
-
-## 5.2.1 (2017-11-22)
-
-- Properly support FreeBSD
-- Do not run service restart for solaris which fails
-
-## 5.2.0 (2017-08-07)
-
-- Lazily evaluate the config template variables to allow overrides to properly apply
-- Avoid Chefspec deprecation warnings
-
-## 5.1.1 (2017-07-28)
-
-- Fix support for Amazon Linux on Chef 13
-- Expand testing to cover Debian 9 in Travis
-
-## 5.1.0 (2017-07-28)
-
-- Add an option to allow recipient canonical maps
-
-## 5.0.3 (2017-06-26)
-
-- Correct attribute line for use_relay_restrictions_maps to prevent converge failures
-
-## 5.0.2 (2017-05-17)
-
-- Fix use_relay_restrictions_maps attribute misspelling in attributes file
-
-## 5.0.1 (2017-03-03)
-
-- Fix documentation error on inet-interfaces
-- Test with Local Delivery instead of Rake
-- Fix master.cf attributes types on README
-
-## 5.0.0 (2017-01-17)
-
-- Manage any hash: tables for postfix with hash_maps recipe
-- Fully customizable master.cf file
-- Support for any kind of postfix lookup tables
-- Remove old minitest files
-- Update chef requirement in the readme
-- Update tests for new config comment blocks
-- fixing /etc/aliases syntax for full-mailaddresses
-
-## 4.0.0 (2016-09-07)
-
-- Update supported platforms in metadata
-- Remove node name from config file
-- Testing updates
-- Use node.normal vs. node.set to avoid deprecation warnings
-- Require Chef 12+
-
-## v3.8.0 (2016-04-01)
-
-- Updated attributes to use node.default_unless instead of node.default to be more wrapper friendly
-- Added integration and unit testing in Travis CI
-- Added rubocop config and resolved rubocop warnings
-- Added Gemfile with all necessary test deps
-- Added standard gitignore and chefignore files
-- Added updated contributing and testing docs
-- Removed the Kitchen Digital Ocean files and dependencies
-- Added additional platforms to the Test Kitchen config
-- Added a Rakefile for simplified testing
-- Fixed a typo in the use_relay_restrictions_maps attribute that prevented the default from being set
-- Added fedora and oracle as supported platforms in the metadata
-- Removed the attributes from the metadata.
-- Added long_description to the metadata
-- Added Chef 11 compatibility checks to issues_url and source_url in metadata.rb
-- Added maintainers.md and maintainers.toml files
-
-## v3.7.0 (2015-04-30)
-
-- Adding support for relay restrictions
-- Update chefspec and serverspec tests
-
-## v3.6.2 (2014-10-31)
-
-- Fix FreeBSDisms
-
-## v3.6.1 (2014-10-28)
-
-- Fix documentation around node['postfix']['main']['relayhost'] attribute
-- Fix logic around include_recipe 'postfix::virtual_aliases_domains'
-
-## v3.6.0 (2014-08-25)
-
-- restart postfix after updating virtual alias templates #86
-- fixing typo for alias_db location in omnios
-- moving conditional attributes to a recipe so they can be modified
-- via other cookbook attributes
-
-## v3.5.0 (2014-08-25)
-
-Adding virtual_domains functionality
-
-## v3.4.1 (2014-08-20)
-
-Removing unused parameters from main.cf
-
-## v3.4.0 (2014-07-25)
-
-Refactoring to fix some logic issues
-
-## v3.3.1 (2014-06-11)
-
-Reverting #37 - [COOK-3418] Virtual Domain Support PR - duplicate of #55
-
-## v3.3.0 (2014-06-11)
-
-- 37 - [COOK-3418] - Virtual Domain Support
-- 44 - Fix minor formatting issue in attributes
-- 55 - Add support for virtual aliases
-- 57 - Fixing attributes bug in README
-- 64 - add smtp_generic maps configuration option
-- 66 - [COOK-3652] Add support for transport mappings
-- 67 - [COOK-4662] Added support for access control
-- 68 - Properly handle binding to loopback on mixed IPV4/IPV6 systems
-
-## v3.2.0 (2014-05-09)
-
-- [COOK-4619] - no way to unset recipient_delimiter
-
-## v3.1.8 (2014-03-27)
-
-- [COOK-4410] - Fix sender_canonical configuration by adding template
-- and postmap execution
-
-## v3.1.6 (2014-03-19)
-
-- [COOK-4423] - use platform_family, find cert.pem on rhel
-
-## v3.1.4 (2014-02-27)
-
-[COOK-4329] Migrate minitest PITs to latest test-kitchen + serverspec
-
-## v3.1.2 (2014-02-19)
-
-### Bug
-
-- **[COOK-4357](https://tickets.chef.io/browse/COOK-4357)** - postfix::sasl_auth recipe fails to converge
-
-## v3.1.0 (2014-02-19)
-
-### Bug
-
-- **[COOK-4322](https://tickets.chef.io/browse/COOK-4322)** - Postfix cookbook has incorrect default path for sasl_passwd
-
-### New Feature
-
-- **[COOK-4086](https://tickets.chef.io/browse/COOK-4086)** - use conf_dir attribute for sasl recipe, and add omnios support
-- **[COOK-2551](https://tickets.chef.io/browse/COOK-2551)** - Support creating the sender_canonical map file
-
-## v3.0.4
-
-### Bug
-
-- **[COOK-3824](https://tickets.chef.io/browse/COOK-3824)** - main.cf.erb mishandles lists
-
-### Improvement
-
-- **[COOK-3822](https://tickets.chef.io/browse/COOK-3822)** - postfix cookbook readme has an incorrect example
-- Got rubocop errors down to 32
-
-### New Feature
-
-- **[COOK-2551](https://tickets.chef.io/browse/COOK-2551)** - Support creating the sender_canonical map file
-
-## v3.0.2
-
-### Bug
-
-- **[COOK-3617](https://tickets.chef.io/browse/COOK-3617)** - Fix error when there is no FQDN
-- **[COOK-3530](https://tickets.chef.io/browse/COOK-3530)** - Update `client.rb` after 3.0.0 refactor
-- **[COOK-2499](https://tickets.chef.io/browse/COOK-2499)** - Do not use resource cloning
-
-### Improvement
-
-- **[COOK-3116](https://tickets.chef.io/browse/COOK-3116)** - Add SmartOS support
-
-## v3.0.0
-
-### Improvement
-
-- **[COOK-3328](https://tickets.chef.io/browse/COOK-3328)** - Postfix main/master and attributes refactor
-
-**Breaking changes**:
-
-- Attributes are namespaced as `node['postfix']`, `node['postfix']['main']`, and `node['postfix']['master']`.
-
-## v2.1.6
-
-### Bug
-
-- [COOK-2501]: Reference to `['postfix']['domain']` should be `['postfix']['mydomain']`
-- [COOK-2715]: master.cf uses old name for `smtp_fallback_relay` (`fallback_relay`) parameter in master.cf
-
-## v2.1.4
-
-- [COOK-2281] - postfix aliases uses require_recipe statement
-
-## v2.1.2
-
-- [COOK-2010] - postfix sasl_auth does not include the sasl plain package
-
-## v2.1.0
-
-- [COOK-1233] - optional configuration for canonical maps
-- [COOK-1660] - allow comma separated arrays in aliases
-- [COOK-1662] - allow inet_interfaces configuration via attribute
-
-## v2.0.0
-
-This version uses platform_family attribute, making the cookbook incompatible with older versions of Chef/Ohai, hence the major version bump.
-
-- [COOK-1535] - `smtpd_cache` should be in `data_directory`, not `queue_directory`
-- [COOK-1790] - /etc/aliases template is only in ubuntu directory
-- [COOK-1792] - add minitest-chef tests to postfix cookbook
-
-## v1.2.2
-
-- [COOK-1442] - Missing ['postfix']['domain'] Attribute causes initial installation failure
-- [COOK-1520] - Add support for procmail delivery
-- [COOK-1528] - Make aliases template less specific
-- [COOK-1538] - Add iptables_rule template
-- [COOK-1540] - Add smtpd_milters and non_smtpd_milters parameters to main.cf
-
-## v1.2.0
-
-- [COOK-880] - add client/server roles for search-based discovery of relayhost
-
-## v1.0.0
-
-- [COOK-668] - RHEL/CentOS/Scientific/Amazon platform support
-- [COOK-733] - postfix::aliases recipe to manage /etc/aliases
-- [COOK-821] - add README.md :)
-
-## v0.8.4
-
-- Current public release.
diff --git a/lc-gdn-chef/cookbooks/postfix/LICENSE b/lc-gdn-chef/cookbooks/postfix/LICENSE
deleted file mode 100644
index 8f71f43fee3f78649d238238cbde51e6d7055c82..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/postfix/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "{}"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright {yyyy} {name of copyright owner}
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-
diff --git a/lc-gdn-chef/cookbooks/postfix/README.md b/lc-gdn-chef/cookbooks/postfix/README.md
deleted file mode 100644
index 0be8eb15022e15474cc7fdd1155c11549efdd77a..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/postfix/README.md
+++ /dev/null
@@ -1,477 +0,0 @@
-# postfix Cookbook
-
-[](https://supermarket.chef.io/cookbooks/postfix)
-[](https://github.com/sous-chefs/postfix/actions?query=workflow%3Aci)
-[](#backers)
-[](#sponsors)
-[](https://opensource.org/licenses/Apache-2.0)
-
-Installs and configures postfix for client or outbound relayhost, or to do SASL authentication.
-
-On RHEL-family systems, sendmail will be replaced with postfix.
-
-## Maintainers
-
-This cookbook is maintained by the Sous Chefs. The Sous Chefs are a community of Chef cookbook maintainers working together to maintain important cookbooks. If you’d like to know more please visit [sous-chefs.org](https://sous-chefs.org/) or come chat with us on the Chef Community Slack in [#sous-chefs](https://chefcommunity.slack.com/messages/C2V7B88SF).
-
-## Requirements
-
-### Platforms
-
-- Ubuntu
-- Debian
-- RHEL/CentOS/Scientific
-- Amazon Linux (as of AMIs created after 4/9/2012)
-- FreeBSD
-
-May work on other platforms with or without modification.
-
-### Chef
-
-- Chef 12.1+
-
-### Cookbooks
-
-- none
-
-## Attributes
-
-See `attributes/default.rb` for default values.
-
-### Generic cookbook attributes
-
-- `node['postfix']['mail_type']` - Sets the kind of mail configuration. `master` will set up a server (relayhost).
-- `node['postfix']['relayhost_role']` - name of a role used for search in the client recipe.
-- `node['postfix']['relayhost_port']` - listening network port of the relayhost.
-- `node['postfix']['multi_environment_relay']` - set to true if nodes should not constrain search for the relayhost in their own environment.
-- `node['postfix']['use_procmail']` - set to true if nodes should use procmail as the delivery agent.
-- `node['postfix']['use_alias_maps']` - set to true if you want the cookbook to use/configure alias maps
-- `node['postfix']['use_transport_maps']` - set to true if you want the cookbook to use/configure transport maps
-- `node['postfix']['use_access_maps']` - set to true if you want the cookbook to use/configure access maps
-- `node['postfix']['use_virtual_aliases']` - set to true if you want the cookbook to use/configure virtual alias maps
-- `node['postfix']['use_relay_restrictions_maps']` - set to true if you want the cookbook to use/configure a list of domains to which postfix will allow relay
-- `node['postfix']['aliases']` - hash of aliases to create with `recipe[postfix::aliases]`, see below under **Recipes** for more information.
-- `node['postfix']['transports']` - hash of transports to create with `recipe[postfix::transports]`, see below under **Recipes** for more information.
-- `node['postfix']['access']` - hash of access to create with `recipe[postfix::access]`, see below under **Recipes** for more information.
-- `node['postfix']['virtual_aliases']` - hash of virtual_aliases to create with `recipe[postfix::virtual_aliases]`, see below under **Recipes** for more information.
-- `node['postfix']['main_template_source']` - Cookbook source for main.cf template. Default 'postfix'
-- `node['postfix']['master_template_source']` - Cookbook source for master.cf template. Default 'postfix'
-
-### main.cf and sasl_passwd template attributes
-
-The main.cf template has been simplified to include any attributes in the `node['postfix']['main']` data structure. The following attributes are still included with this cookbook to maintain some semblance of backwards compatibility.
-
-This change in namespace to `node['postfix']['main']` should allow for greater flexibility, given the large number of configuration variables for the postfix daemon. All of these cookbook attributes correspond to the option of the same name in `/etc/postfix/main.cf`.
-
-- `node['postfix']['main']['biff']` - (yes/no); default no
-- `node['postfix']['main']['append_dot_mydomain']` - (yes/no); default no
-- `node['postfix']['main']['myhostname']` - defaults to fqdn from Ohai
-- `node['postfix']['main']['mydomain']` - defaults to domain from Ohai
-- `node['postfix']['main']['myorigin']` - defaults to $myhostname
-- `node['postfix']['main']['mynetworks']` - default is nil, which forces Postfix to default to loopback addresses.
-- `node['postfix']['main']['inet_interfaces']` - set to `loopback-only`, or `all` for server recipe
-- `node['postfix']['main']['alias_maps']` - set to `hash:/etc/aliases`
-- `node['postfix']['main']['mailbox_size_limit']` - set to `0` (disabled)
-- `node['postfix']['main']['mydestination']` - default fqdn, hostname, localhost.localdomain, localhost
-- `node['postfix']['main']['smtpd_use_tls']` - (yes/no); default yes. See conditional cert/key attributes.
-- `node['postfix']['main']['smtpd_tls_cert_file']` - conditional attribute, set to full path of server's x509 certificate.
-- `node['postfix']['main']['smtpd_tls_key_file']` - conditional attribute, set to full path of server's private key
-- `node['postfix']['main']['smtpd_tls_CAfile']` - set to platform specific CA bundle
-- `node['postfix']['main']['smtpd_tls_session_cache_database']` - set to `btree:${data_directory}/smtpd_scache`
-- `node['postfix']['main']['smtp_use_tls']` - (yes/no); default yes. See following conditional attributes.
-- `node['postfix']['main']['smtp_tls_CAfile']` - set to platform specific CA bundle
-- `node['postfix']['main']['smtp_tls_session_cache_database']` - set to `btree:${data_directory}/smtpd_scache`
-- `node['postfix']['main']['smtp_sasl_auth_enable']` - (yes/no); default no. If enabled, see following conditional attributes.
-- `node['postfix']['main']['smtp_sasl_password_maps']` - Set to `hash:/etc/postfix/sasl_passwd` template file
-- `node['postfix']['main']['smtp_sasl_security_options']` - Set to noanonymous
-- `node['postfix']['main']['relayhost']` - Set to empty string
-- `node['postfix']['sender_canonical_map_entries']` - (hash with key value pairs); default not configured. Setup generic canonical maps. See `man 5 canonical`. If has at least one value, then will be enabled in config.
-- `node['postfix']['smtp_generic_map_entries']` - (hash with key value pairs); default not configured. Setup generic postfix maps. See `man 5 generic`. If has at least one value, then will be enabled in config.
-- `node['postfix']['recipient_canonical_map_entries']` - (hash with key value pairs); default not configured. Setup generic canonical maps. See `man 5 canonical`. If has at least one value, then will be enabled in config.
-- `node['postfix']['sasl']['smtp_sasl_user_name']` - SASL user to authenticate as. Default empty. Kept only for backwards compatibility; prefer the new syntax below.
-- `node['postfix']['sasl']['smtp_sasl_passwd']` - SASL password to use. Default empty. Kept only for backwards compatibility; prefer the new syntax below.
-- `node['postfix']['sasl']` = ```json {
- "relayhost1" => {
- 'username' => 'foo',
- 'password' => 'bar'
- },
- "relayhost2" => {
- ...
- }
- }``` - You must set the following attribute, otherwise the attribute will default to empty
-
-Example of a JSON role config for setting up the `*_map_entries` attributes:
-
-`postfix : {`
-
-`...`
-
-`"smtp_generic_map_entries" : { "root@youinternaldomain.local" : "admin@example.com", "admin@youinternaldomain.local" : "admin@example.com" }`
-
-`}`
-
-### master.cf template attributes
-
-The master.cf template has been changed to allow full customization of the file content. For backwards compatibility, the default attributes generate the same master.cf as before, but the file can be completely rewritten via the `node['postfix']['master']` data structure, for instance in a role.
-
-Examples of JSON role config for customizing master.cf:
-
-`postfix : {`
-
-`...`
-
-turn some services off or on:
-
-```json
- "master" : {
- "smtps": {
- "active": true
- },
- "old-cyrus": {
- "active": false
- },
- "cyrus": {
- "active": false
- },
- "uucp": {
- "active": false
- },
- "ifmail": {
- "active": false
- },
-```
-
-`...` define your own service:
-
-```json
- "spamfilter": {
- "comment": "My own spamfilter",
- "active": true,
- "order": 590,
- "type": "unix",
- "unpriv": false,
- "chroot": false,
- "command": "pipe",
- "args": ["flags=Rq user=spamd argv=/usr/bin/spamfilter.sh -oi -f ${sender} ${recipient}"]
- }
-```
-
-`...`
-
-`}` `}`
-
-The possible service hash fields and their meanings are listed below. The hash key has to be unique unless you wish to override a default service definition.
-
-Field | Mandatory | Description
-------- | --------- | --------------------------------------------------------------------
-active | Yes | Boolean. Defines whether or not the service needs to be in master.cf
-comment | No | String. If you would like to add a comment line before service line
-order | Yes | Integer. Number to define the order of lines in the file
-type | Yes | String. Type of the service (inet, unix, fifo)
-private | No | Boolean. If present replaced by `y` or `n`, otherwise by `-`
-unpriv | No | Boolean. If present replaced by `y` or `n`, otherwise by `-`
-chroot | No | Boolean. If present replaced by `y` or `n`, otherwise by `-`
-wakeup | No | String. If present value placed in file, otherwise replaced by `-`
-maxproc | No | String. If present value placed in file, otherwise replaced by `-`
-command | Yes | String. The command to be executed.
-args | Yes | Array of Strings. Arguments passed to command.
-
-For more information about the meaning of the fields, consult the `master(5)` manual.
-
-## Recipes
-
-### default
-
-Installs the postfix package and manages the service and the main configuration files (`/etc/postfix/main.cf` and `/etc/postfix/master.cf`). See **Usage** and **Examples** to see how to affect the behavior of this recipe through configuration. Depending on the `node['postfix']['use_alias_maps']`, `node['postfix']['use_transport_maps']`, `node['postfix']['use_access_maps']` and `node['postfix']['use_virtual_aliases']` attributes, the default recipe can call additional recipes to manage additional postfix configuration files.
-
-For a more dynamic approach to discovery for the relayhost, see the `client` and `server` recipes below.
-
-### client
-
-Use this recipe to have nodes automatically search for the mail relay based on which node has the `node['postfix']['relayhost_role']` role. Sets the `node['postfix']['main']['relayhost']` attribute to the first result from the search.
-
-Includes the default recipe to install, configure and start postfix.
-
-Does not work with `chef-solo`.
-
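As a rough, illustrative sketch of what that discovery amounts to (not the recipe's exact code; the real recipe also honours `node['postfix']['multi_environment_relay']` and may format the value differently):

```ruby
# Illustrative only: role-based relayhost discovery as described above.
relay_role = node['postfix']['relayhost_role']
query = "roles:#{relay_role}"
query += " AND chef_environment:#{node.chef_environment}" unless node['postfix']['multi_environment_relay']

relay = search(:node, query).first   # Chef Server search, hence no chef-solo support
if relay
  node.default['postfix']['main']['relayhost'] = "[#{relay['fqdn']}]:#{node['postfix']['relayhost_port']}"
end

include_recipe 'postfix'
```
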
-### sasl_auth
-
-Sets up the system to authenticate with a remote mail relay using SASL authentication.
-
-### server
-
-To use Chef Server search to automatically detect a node that is the relayhost, use this recipe in a role that will be relayhost. By default, the role should be "relayhost" but you can change the attribute `node['postfix']['relayhost_role']` to modify this.
-
-**Note** This recipe will set the `node['postfix']['mail_type']` to "master" with an override attribute.
-
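Conceptually, the server recipe boils down to something like the sketch below (illustrative only; the real recipe may do more). The `inet_interfaces` setting reflects the attribute list above, which notes that the server recipe listens on all interfaces.

```ruby
# Sketch only: force relayhost behaviour, then reuse the default recipe.
node.override['postfix']['mail_type'] = 'master'
node.override['postfix']['main']['inet_interfaces'] = 'all'
include_recipe 'postfix'
```
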
-### maps
-
-General recipe to manage any number of postfix lookup tables of any type. It can replace recipes like `transport` or `virtual_aliases`, and, more importantly, it can create any kind of map that has no dedicated recipe, including database lookup map configurations. `maps` is a hash whose keys are lookup table types and whose values are hashes with filenames as keys and file contents as values. The file content is any number of key/value pairs whose meaning depends on the lookup table type. Example:
-
-```json
- "override_attributes": {
- "postfix": {
- "maps": {
- "hash": {
- "/etc/postfix/vmailbox": {
- "john@example.com": "ok",
-            "john@example.net": "ok"
- },
- "/etc/postfix/virtual": {
- "postmaster@example.com": "john@example.com",
- "postmaster@example.net": "john@example.net",
- "root@mail.example.net": "john@example.net"
- },
- "/etc/postfix/envelope_senders": {
- "@example.com": "john@example.com",
- "@example.net": "john@example.net"
- },
- "/etc/postfix/relay_recipients": {
- "john@example.net": "ok",
- "john@example.com": "ok",
-            "admin@example.com": "ok"
- }
- },
- "pgsql": {
- "/etc/postfix/pgtest": {
- "hosts": "db.local:2345",
- "user": "postfix",
- "password": "test",
- "dbname": "postdb",
- "query": "SELECT replacement FROM aliases WHERE mailbox = '%s'"
- }
- }
- }
- }
-```
-
-To use these files in your configuration reference them in `node['postfix']['main']`, for instance:
-
-```json
- "postfix": {
- "main": {
- "smtpd_sender_login_maps": "hash:/etc/postfix/envelope_senders",
- "relay_recipient_maps": "hash:/etc/postfix/relay_recipients",
- "virtual_mailbox_maps": "hash:/etc/postfix/vmailbox",
-      "virtual_alias_maps": "hash:/etc/postfix/virtual"
- }
- }
-```
-
-### aliases
-
-Manage `/etc/aliases` with this recipe. Currently only the Ubuntu 10.04 platform has a template for the aliases file. Add your aliases template to `templates/default` or to the appropriate platform+version directory, per the File Specificity rules for templates. Then specify a hash of aliases for the `node['postfix']['aliases']` attribute.
-
-Arrays are supported as alias values. Since postfix supports comma-separated values per alias, simply specify your alias as an array to use this handy feature.
-
-### transports
-
-Manage `/etc/postfix/transport` with this recipe.
-
-### access
-
-Manage `/etc/postfix/access` with this recipe.
-
-### virtual_aliases
-
-Manage `/etc/postfix/virtual` with this recipe.
-
-### relay_restrictions
-
-Manage `/etc/postfix/relay_restrictions` with this recipe. The postfix option `smtpd_relay_restrictions` in main.cf will point to this hash map db.
-
-
-
-## Usage
-
-On systems that should simply send mail directly to a relay, or out to the internet, use `recipe[postfix]` and modify the `node['postfix']['main']['relayhost']` attribute via a role.
-
-On systems that should be the MX for a domain, set the attributes accordingly and make sure the `node['postfix']['mail_type']` attribute is `master`. See **Examples** for information on how to use `recipe[postfix::server]` to do this automatically.
-
-If you need to use SASL authentication to send mail through your ISP (such as on a home network), use `postfix::sasl_auth` and set the appropriate attributes.
-
-For each of these implementations, see **Examples** for role usage.
-
-### Examples
-
-The example roles below only have the relevant postfix usage. You may have other contents depending on what you're configuring on your systems.
-
-The `base` role is applied to all nodes in the environment.
-
-```ruby
-name "base"
-run_list("recipe[postfix]")
-override_attributes(
- "postfix" => {
- "mail_type" => "client",
- "main" => {
- "mydomain" => "example.com",
- "myorigin" => "example.com",
- "relayhost" => "[smtp.example.com]",
- "smtp_use_tls" => "no"
- }
- }
-)
-```
-
-The `relayhost` role is applied to the nodes that are relayhosts. Often this is 2 systems using a CNAME of `smtp.example.com`.
-
-```ruby
-name "relayhost"
-run_list("recipe[postfix::server]")
-override_attributes(
- "postfix" => {
- "mail_type" => "master",
- "main" => {
- "mynetworks" => [ "10.3.3.0/24", "127.0.0.0/8" ],
- "inet_interfaces" => "all",
- "mydomain" => "example.com",
- "myorigin" => "example.com"
-    }
-  }
-)
-```
-
-The `sasl_relayhost` role is applied to the nodes that are relayhosts and require authenticating with SASL. For example this might be on a household network with an ISP that otherwise blocks direct internet access to SMTP.
-
-```ruby
-name "sasl_relayhost"
-run_list("recipe[postfix], recipe[postfix::sasl_auth]")
-override_attributes(
- "postfix" => {
- "mail_type" => "master",
- "main" => {
- "mynetworks" => "10.3.3.0/24",
- "mydomain" => "example.com",
- "myorigin" => "example.com",
- "relayhost" => "[smtp.comcast.net]:587",
- "smtp_sasl_auth_enable" => "yes"
- },
- "sasl" => {
- "relayhost1" => {
-      "username" => "your_username",
-      "password" => "your_password"
- },
- "relayhost2" => {
- ...
- },
- ...
- }
- }
-)
-```
-
-For an example of using encrypted data bags to encrypt the SASL password, see the following blog post:
-
--
-
-#### Examples using the client & server recipes
-
-If you'd like to use the more dynamic search based approach for discovery, use the server and client recipes. First, create a relayhost role.
-
-```ruby
-name "relayhost"
-run_list("recipe[postfix::server]")
-override_attributes(
- "postfix" => {
- "main" => {
- "mynetworks" => "10.3.3.0/24",
- "mydomain" => "example.com",
- "myorigin" => "example.com"
- }
- }
-)
-```
-
-Then, add the `postfix::client` recipe to the run list of your `base` role or equivalent role for postfix clients.
-
-```ruby
-name "base"
-run_list("recipe[postfix::client]")
-override_attributes(
- "postfix" => {
- "mail_type" => "client",
- "main" => {
- "mydomain" => "example.com",
- "myorigin" => "example.com"
- }
- }
-)
-```
-
-If you wish to use a different role name for the relayhost, then also set the attribute in the `base` role. For example, `postfix_master` as the role name:
-
-```ruby
-name "postfix_master"
-description "a role for postfix master that isn't relayhost"
-run_list("recipe[postfix::server]")
-override_attributes(
- "postfix" => {
- "main" => {
- "mynetworks" => "10.3.3.0/24",
- "mydomain" => "example.com",
- "myorigin" => "example.com"
- }
- }
-)
-```
-
-The base role would look something like this:
-
-```ruby
-name "base"
-run_list("recipe[postfix::client]")
-override_attributes(
- "postfix" => {
- "relayhost_role" => "postfix_master",
- "mail_type" => "client",
- "main" => {
- "mydomain" => "example.com",
- "myorigin" => "example.com"
- }
- }
-)
-```
-
-To use relay restrictions override the relay restrictions attribute in this format:
-
-```ruby
-override_attributes(
- "postfix" => {
- "use_relay_restrictions_maps" => true,
- "relay_restrictions" => {
- "chef.io" => "OK",
- ".chef.io" => "OK",
- "example.com" => "OK"
- }
- }
-)
-```
-
-## Contributors
-
-This project exists thanks to all the people who [contribute.](https://opencollective.com/sous-chefs/contributors.svg?width=890&button=false)
-
-### Backers
-
-Thank you to all our backers!
-
-
-
-### Sponsors
-
-Support this project by becoming a sponsor. Your logo will show up here with a link to your website.
-
-
-
-
-
-
-
-
-
-
-
diff --git a/lc-gdn-chef/cookbooks/postfix/attributes/default.rb b/lc-gdn-chef/cookbooks/postfix/attributes/default.rb
deleted file mode 100644
index 6ec3d882207f5baffa82f6ad5eecb3a769364939..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/postfix/attributes/default.rb
+++ /dev/null
@@ -1,410 +0,0 @@
-# Author:: Joshua Timberman
-# Copyright:: 2009-2019, Chef Software, Inc.
-# License:: Apache License, Version 2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-default['postfix']['packages'] = %w(postfix)
-
-# Generic cookbook attributes
-default['postfix']['mail_type'] = 'client'
-default['postfix']['relayhost_role'] = 'relayhost'
-default['postfix']['relayhost_port'] = '25'
-default['postfix']['multi_environment_relay'] = false
-default['postfix']['use_procmail'] = false
-default['postfix']['use_alias_maps'] = platform?('freebsd')
-default['postfix']['use_transport_maps'] = false
-default['postfix']['use_access_maps'] = false
-default['postfix']['use_virtual_aliases'] = false
-default['postfix']['use_virtual_aliases_domains'] = false
-default['postfix']['use_relay_restrictions_maps'] = false
-default['postfix']['transports'] = {}
-default['postfix']['access'] = {}
-default['postfix']['virtual_aliases'] = {}
-default['postfix']['virtual_aliases_domains'] = {}
-default['postfix']['main_template_source'] = 'postfix'
-default['postfix']['master_template_source'] = 'postfix'
-default['postfix']['sender_canonical_map_entries'] = {}
-default['postfix']['smtp_generic_map_entries'] = {}
-default['postfix']['recipient_canonical_map_entries'] = {}
-default['postfix']['access_db_type'] = 'hash'
-default['postfix']['aliases_db_type'] = 'hash'
-default['postfix']['transport_db_type'] = 'hash'
-default['postfix']['virtual_alias_db_type'] = 'hash'
-default['postfix']['virtual_alias_domains_db_type'] = 'hash'
-
-case node['platform']
-when 'smartos'
- default['postfix']['conf_dir'] = '/opt/local/etc/postfix'
- default['postfix']['aliases_db'] = '/opt/local/etc/postfix/aliases'
- default['postfix']['transport_db'] = '/opt/local/etc/postfix/transport'
- default['postfix']['access_db'] = '/opt/local/etc/postfix/access'
- default['postfix']['virtual_alias_db'] = '/opt/local/etc/postfix/virtual'
- default['postfix']['virtual_alias_domains_db'] = '/opt/local/etc/postfix/virtual_domains'
- default['postfix']['relay_restrictions_db'] = '/opt/local/etc/postfix/relay_restrictions'
-when 'freebsd'
- default['postfix']['conf_dir'] = '/usr/local/etc/postfix'
- default['postfix']['aliases_db'] = '/etc/aliases'
- default['postfix']['transport_db'] = '/usr/local/etc/postfix/transport'
- default['postfix']['access_db'] = '/usr/local/etc/postfix/access'
- default['postfix']['virtual_alias_db'] = '/usr/local/etc/postfix/virtual'
- default['postfix']['virtual_alias_domains_db'] = '/usr/local/etc/postfix/virtual_domains'
- default['postfix']['relay_restrictions_db'] = '/etc/postfix/relay_restrictions'
-when 'omnios'
- default['postfix']['conf_dir'] = '/opt/omni/etc/postfix'
- default['postfix']['aliases_db'] = '/opt/omni/etc/postfix/aliases'
- default['postfix']['transport_db'] = '/opt/omni/etc/postfix/transport'
- default['postfix']['access_db'] = '/opt/omni/etc/postfix/access'
- default['postfix']['virtual_alias_db'] = '/opt/omni/etc/postfix/virtual'
- default['postfix']['virtual_alias_domains_db'] = '/opt/omni/etc/postfix/virtual_domains'
- default['postfix']['relay_restrictions_db'] = '/opt/omni/etc/postfix/relay_restrictions'
- default['postfix']['uid'] = 11
-else
- default['postfix']['conf_dir'] = '/etc/postfix'
- default['postfix']['aliases_db'] = '/etc/aliases'
- default['postfix']['transport_db'] = '/etc/postfix/transport'
- default['postfix']['access_db'] = '/etc/postfix/access'
- default['postfix']['virtual_alias_db'] = '/etc/postfix/virtual'
- default['postfix']['virtual_alias_domains_db'] = '/etc/postfix/virtual_domains'
- default['postfix']['relay_restrictions_db'] = '/etc/postfix/relay_restrictions'
-end
-
-# Non-default main.cf attributes
-default['postfix']['main']['biff'] = 'no'
-default['postfix']['main']['append_dot_mydomain'] = 'no'
-default['postfix']['main']['myhostname'] = (node['fqdn'] || node['hostname']).to_s.chomp('.')
-default['postfix']['main']['mydomain'] = (node['domain'] || node['hostname']).to_s.chomp('.')
-default['postfix']['main']['myorigin'] = '$myhostname'
-default['postfix']['main']['mydestination'] = [node['postfix']['main']['myhostname'], node['hostname'], 'localhost.localdomain', 'localhost'].compact
-default['postfix']['main']['smtpd_use_tls'] = 'yes'
-default['postfix']['main']['smtp_use_tls'] = 'yes'
-default['postfix']['main']['smtpd_tls_mandatory_protocols'] = '!SSLv2,!SSLv3'
-default['postfix']['main']['smtp_tls_mandatory_protocols'] = '!SSLv2,!SSLv3'
-default['postfix']['main']['smtpd_tls_protocols'] = '!SSLv2,!SSLv3'
-default['postfix']['main']['smtp_tls_protocols'] = '!SSLv2,!SSLv3'
-default['postfix']['main']['smtp_sasl_auth_enable'] = 'no'
-default['postfix']['main']['mailbox_size_limit'] = 0
-default['postfix']['main']['mynetworks'] = nil
-default['postfix']['main']['inet_interfaces'] = 'loopback-only'
-
-# Conditional attributes, also reference _attributes recipe
-case node['platform_family']
-when 'debian'
- default['postfix']['cafile'] = '/etc/ssl/certs/ca-certificates.crt'
-when 'smartos'
- default['postfix']['main']['smtpd_use_tls'] = 'no'
- default['postfix']['main']['smtp_use_tls'] = 'no'
- default['postfix']['cafile'] = '/opt/local/etc/postfix/cacert.pem'
-when 'rhel'
- default['postfix']['cafile'] = '/etc/pki/tls/cert.pem'
-when 'amazon'
- default['postfix']['cafile'] = '/etc/pki/tls/cert.pem'
-when 'suse'
- default['postfix']['main']['setgid_group'] = 'maildrop'
- default['postfix']['main']['daemon_directory'] = '/usr/lib/postfix/bin'
-else
- default['postfix']['cafile'] = "#{node['postfix']['conf_dir']}/cacert.pem"
-end
-
-# # Default main.cf attributes according to `postconf -d`
-# default['postfix']['main']['relayhost'] = ''
-# default['postfix']['main']['milter_default_action'] = 'tempfail'
-# default['postfix']['main']['milter_protocol'] = '6'
-# default['postfix']['main']['smtpd_milters'] = ''
-# default['postfix']['main']['non_smtpd_milters'] = ''
-# default['postfix']['main']['sender_canonical_classes'] = nil
-# default['postfix']['main']['recipient_canonical_classes'] = nil
-# default['postfix']['main']['canonical_classes'] = nil
-# default['postfix']['main']['sender_canonical_maps'] = nil
-# default['postfix']['main']['recipient_canonical_maps'] = nil
-# default['postfix']['main']['canonical_maps'] = nil
-
-# Master.cf attributes
-default['postfix']['master']['smtp']['active'] = true
-default['postfix']['master']['smtp']['order'] = 10
-default['postfix']['master']['smtp']['type'] = 'inet'
-default['postfix']['master']['smtp']['private'] = false
-default['postfix']['master']['smtp']['chroot'] = false
-default['postfix']['master']['smtp']['command'] = 'smtpd'
-default['postfix']['master']['smtp']['args'] = []
-
-default['postfix']['master']['submission']['active'] = false
-default['postfix']['master']['submission']['order'] = 20
-default['postfix']['master']['submission']['type'] = 'inet'
-default['postfix']['master']['submission']['private'] = false
-default['postfix']['master']['submission']['chroot'] = false
-default['postfix']['master']['submission']['command'] = 'smtpd'
-default['postfix']['master']['submission']['args'] = ['-o smtpd_enforce_tls=yes', '-o smtpd_sasl_auth_enable=yes', '-o smtpd_client_restrictions=permit_sasl_authenticated,reject']
-
-default['postfix']['master']['smtps']['active'] = false
-default['postfix']['master']['smtps']['order'] = 30
-default['postfix']['master']['smtps']['type'] = 'inet'
-default['postfix']['master']['smtps']['private'] = false
-default['postfix']['master']['smtps']['chroot'] = false
-default['postfix']['master']['smtps']['command'] = 'smtpd'
-default['postfix']['master']['smtps']['args'] = ['-o smtpd_tls_wrappermode=yes', '-o smtpd_sasl_auth_enable=yes', '-o smtpd_client_restrictions=permit_sasl_authenticated,reject']
-
-default['postfix']['master']['628']['active'] = false
-default['postfix']['master']['628']['order'] = 40
-default['postfix']['master']['628']['type'] = 'inet'
-default['postfix']['master']['628']['private'] = false
-default['postfix']['master']['628']['chroot'] = false
-default['postfix']['master']['628']['command'] = 'qmqpd'
-default['postfix']['master']['628']['args'] = []
-
-default['postfix']['master']['pickup']['active'] = true
-default['postfix']['master']['pickup']['order'] = 50
-default['postfix']['master']['pickup']['type'] = 'fifo'
-default['postfix']['master']['pickup']['private'] = false
-default['postfix']['master']['pickup']['chroot'] = false
-default['postfix']['master']['pickup']['wakeup'] = '60'
-default['postfix']['master']['pickup']['maxproc'] = '1'
-default['postfix']['master']['pickup']['command'] = 'pickup'
-default['postfix']['master']['pickup']['args'] = []
-
-default['postfix']['master']['cleanup']['active'] = true
-default['postfix']['master']['cleanup']['order'] = 60
-default['postfix']['master']['cleanup']['type'] = 'unix'
-default['postfix']['master']['cleanup']['private'] = false
-default['postfix']['master']['cleanup']['chroot'] = false
-default['postfix']['master']['cleanup']['maxproc'] = '0'
-default['postfix']['master']['cleanup']['command'] = 'cleanup'
-default['postfix']['master']['cleanup']['args'] = []
-
-default['postfix']['master']['qmgr']['active'] = true
-default['postfix']['master']['qmgr']['order'] = 70
-default['postfix']['master']['qmgr']['type'] = 'fifo'
-default['postfix']['master']['qmgr']['private'] = false
-default['postfix']['master']['qmgr']['chroot'] = false
-default['postfix']['master']['qmgr']['wakeup'] = '300'
-default['postfix']['master']['qmgr']['maxproc'] = '1'
-default['postfix']['master']['qmgr']['command'] = 'qmgr'
-default['postfix']['master']['qmgr']['args'] = []
-
-default['postfix']['master']['tlsmgr']['active'] = true
-default['postfix']['master']['tlsmgr']['order'] = 80
-default['postfix']['master']['tlsmgr']['type'] = 'unix'
-default['postfix']['master']['tlsmgr']['chroot'] = false
-default['postfix']['master']['tlsmgr']['wakeup'] = '1000?'
-default['postfix']['master']['tlsmgr']['maxproc'] = '1'
-default['postfix']['master']['tlsmgr']['command'] = 'tlsmgr'
-default['postfix']['master']['tlsmgr']['args'] = []
-
-default['postfix']['master']['rewrite']['active'] = true
-default['postfix']['master']['rewrite']['order'] = 90
-default['postfix']['master']['rewrite']['type'] = 'unix'
-default['postfix']['master']['rewrite']['chroot'] = false
-default['postfix']['master']['rewrite']['command'] = 'trivial-rewrite'
-default['postfix']['master']['rewrite']['args'] = []
-
-default['postfix']['master']['bounce']['active'] = true
-default['postfix']['master']['bounce']['order'] = 100
-default['postfix']['master']['bounce']['type'] = 'unix'
-default['postfix']['master']['bounce']['chroot'] = false
-default['postfix']['master']['bounce']['maxproc'] = '0'
-default['postfix']['master']['bounce']['command'] = 'bounce'
-default['postfix']['master']['bounce']['args'] = []
-
-default['postfix']['master']['defer']['active'] = true
-default['postfix']['master']['defer']['order'] = 110
-default['postfix']['master']['defer']['type'] = 'unix'
-default['postfix']['master']['defer']['chroot'] = false
-default['postfix']['master']['defer']['maxproc'] = '0'
-default['postfix']['master']['defer']['command'] = 'bounce'
-default['postfix']['master']['defer']['args'] = []
-
-default['postfix']['master']['trace']['active'] = true
-default['postfix']['master']['trace']['order'] = 120
-default['postfix']['master']['trace']['type'] = 'unix'
-default['postfix']['master']['trace']['chroot'] = false
-default['postfix']['master']['trace']['maxproc'] = '0'
-default['postfix']['master']['trace']['command'] = 'bounce'
-default['postfix']['master']['trace']['args'] = []
-
-default['postfix']['master']['verify']['active'] = true
-default['postfix']['master']['verify']['order'] = 130
-default['postfix']['master']['verify']['type'] = 'unix'
-default['postfix']['master']['verify']['chroot'] = false
-default['postfix']['master']['verify']['maxproc'] = '1'
-default['postfix']['master']['verify']['command'] = 'verify'
-default['postfix']['master']['verify']['args'] = []
-
-default['postfix']['master']['flush']['active'] = true
-default['postfix']['master']['flush']['order'] = 140
-default['postfix']['master']['flush']['type'] = 'unix'
-default['postfix']['master']['flush']['private'] = false
-default['postfix']['master']['flush']['chroot'] = false
-default['postfix']['master']['flush']['wakeup'] = '1000?'
-default['postfix']['master']['flush']['maxproc'] = '0'
-default['postfix']['master']['flush']['command'] = 'flush'
-default['postfix']['master']['flush']['args'] = []
-
-default['postfix']['master']['proxymap']['active'] = true
-default['postfix']['master']['proxymap']['order'] = 150
-default['postfix']['master']['proxymap']['type'] = 'unix'
-default['postfix']['master']['proxymap']['chroot'] = false
-default['postfix']['master']['proxymap']['command'] = 'proxymap'
-default['postfix']['master']['proxymap']['args'] = []
-
-default['postfix']['master']['smtpunix']['service'] = 'smtp'
-default['postfix']['master']['smtpunix']['active'] = true
-default['postfix']['master']['smtpunix']['order'] = 160
-default['postfix']['master']['smtpunix']['type'] = 'unix'
-default['postfix']['master']['smtpunix']['chroot'] = false
-default['postfix']['master']['smtpunix']['maxproc'] = '500'
-default['postfix']['master']['smtpunix']['command'] = 'smtp'
-default['postfix']['master']['smtpunix']['args'] = []
-
-default['postfix']['master']['relay']['active'] = true
-default['postfix']['master']['relay']['comment'] = 'When relaying mail as backup MX, disable fallback_relay to avoid MX loops'
-default['postfix']['master']['relay']['order'] = 170
-default['postfix']['master']['relay']['type'] = 'unix'
-default['postfix']['master']['relay']['chroot'] = false
-default['postfix']['master']['relay']['command'] = 'smtp'
-default['postfix']['master']['relay']['args'] = ['-o smtp_fallback_relay=']
-
-default['postfix']['master']['showq']['active'] = true
-default['postfix']['master']['showq']['order'] = 180
-default['postfix']['master']['showq']['type'] = 'unix'
-default['postfix']['master']['showq']['private'] = false
-default['postfix']['master']['showq']['chroot'] = false
-default['postfix']['master']['showq']['command'] = 'showq'
-default['postfix']['master']['showq']['args'] = []
-
-default['postfix']['master']['error']['active'] = true
-default['postfix']['master']['error']['order'] = 190
-default['postfix']['master']['error']['type'] = 'unix'
-default['postfix']['master']['error']['chroot'] = false
-default['postfix']['master']['error']['command'] = 'error'
-default['postfix']['master']['error']['args'] = []
-
-default['postfix']['master']['discard']['active'] = true
-default['postfix']['master']['discard']['order'] = 200
-default['postfix']['master']['discard']['type'] = 'unix'
-default['postfix']['master']['discard']['chroot'] = false
-default['postfix']['master']['discard']['command'] = 'discard'
-default['postfix']['master']['discard']['args'] = []
-
-default['postfix']['master']['local']['active'] = true
-default['postfix']['master']['local']['order'] = 210
-default['postfix']['master']['local']['type'] = 'unix'
-default['postfix']['master']['local']['unpriv'] = false
-default['postfix']['master']['local']['chroot'] = false
-default['postfix']['master']['local']['command'] = 'local'
-default['postfix']['master']['local']['args'] = []
-
-default['postfix']['master']['virtual']['active'] = true
-default['postfix']['master']['virtual']['order'] = 220
-default['postfix']['master']['virtual']['type'] = 'unix'
-default['postfix']['master']['virtual']['unpriv'] = false
-default['postfix']['master']['virtual']['chroot'] = false
-default['postfix']['master']['virtual']['command'] = 'virtual'
-default['postfix']['master']['virtual']['args'] = []
-
-default['postfix']['master']['lmtp']['active'] = true
-default['postfix']['master']['lmtp']['order'] = 230
-default['postfix']['master']['lmtp']['type'] = 'unix'
-default['postfix']['master']['lmtp']['chroot'] = false
-default['postfix']['master']['lmtp']['command'] = 'lmtp'
-default['postfix']['master']['lmtp']['args'] = []
-
-default['postfix']['master']['anvil']['active'] = true
-default['postfix']['master']['anvil']['order'] = 240
-default['postfix']['master']['anvil']['type'] = 'unix'
-default['postfix']['master']['anvil']['chroot'] = false
-default['postfix']['master']['anvil']['maxproc'] = '1'
-default['postfix']['master']['anvil']['command'] = 'anvil'
-default['postfix']['master']['anvil']['args'] = []
-
-default['postfix']['master']['scache']['active'] = true
-default['postfix']['master']['scache']['order'] = 250
-default['postfix']['master']['scache']['type'] = 'unix'
-default['postfix']['master']['scache']['chroot'] = false
-default['postfix']['master']['scache']['maxproc'] = '1'
-default['postfix']['master']['scache']['command'] = 'scache'
-default['postfix']['master']['scache']['args'] = []
-
-default['postfix']['master']['maildrop']['active'] = true
-default['postfix']['master']['maildrop']['comment'] = 'See the Postfix MAILDROP_README file for details. To main.cf will be added: maildrop_destination_recipient_limit=1'
-default['postfix']['master']['maildrop']['order'] = 510
-default['postfix']['master']['maildrop']['type'] = 'unix'
-default['postfix']['master']['maildrop']['unpriv'] = false
-default['postfix']['master']['maildrop']['chroot'] = false
-default['postfix']['master']['maildrop']['command'] = 'pipe'
-default['postfix']['master']['maildrop']['args'] = ['flags=DRhu user=vmail argv=/usr/local/bin/maildrop -d ${recipient}']
-
-default['postfix']['master']['old-cyrus']['active'] = false
-default['postfix']['master']['old-cyrus']['comment'] = 'The Cyrus deliver program has changed incompatibly, multiple times.'
-default['postfix']['master']['old-cyrus']['order'] = 520
-default['postfix']['master']['old-cyrus']['type'] = 'unix'
-default['postfix']['master']['old-cyrus']['unpriv'] = false
-default['postfix']['master']['old-cyrus']['chroot'] = false
-default['postfix']['master']['old-cyrus']['command'] = 'pipe'
-default['postfix']['master']['old-cyrus']['args'] = ['flags=R user=cyrus argv=/usr/lib/cyrus-imapd/deliver -e -m ${extension} ${user}']
-
-default['postfix']['master']['cyrus']['active'] = true
-default['postfix']['master']['cyrus']['comment'] = 'Cyrus 2.1.5 (Amos Gouaux). To main.cf will be added: cyrus_destination_recipient_limit=1'
-default['postfix']['master']['cyrus']['order'] = 530
-default['postfix']['master']['cyrus']['type'] = 'unix'
-default['postfix']['master']['cyrus']['unpriv'] = false
-default['postfix']['master']['cyrus']['chroot'] = false
-default['postfix']['master']['cyrus']['command'] = 'pipe'
-default['postfix']['master']['cyrus']['args'] = ['user=cyrus argv=/usr/lib/cyrus-imapd/deliver -e -r ${sender} -m ${extension} ${user}']
-
-default['postfix']['master']['uucp']['active'] = true
-default['postfix']['master']['uucp']['comment'] = 'See the Postfix UUCP_README file for configuration details.'
-default['postfix']['master']['uucp']['order'] = 540
-default['postfix']['master']['uucp']['type'] = 'unix'
-default['postfix']['master']['uucp']['unpriv'] = false
-default['postfix']['master']['uucp']['chroot'] = false
-default['postfix']['master']['uucp']['command'] = 'pipe'
-default['postfix']['master']['uucp']['args'] = ['flags=Fqhu user=uucp argv=uux -r -n -z -a$sender - $nexthop!rmail ($recipient)']
-
-default['postfix']['master']['ifmail']['active'] = false
-default['postfix']['master']['ifmail']['order'] = 550
-default['postfix']['master']['ifmail']['type'] = 'unix'
-default['postfix']['master']['ifmail']['unpriv'] = false
-default['postfix']['master']['ifmail']['chroot'] = false
-default['postfix']['master']['ifmail']['command'] = 'pipe'
-default['postfix']['master']['ifmail']['args'] = ['flags=F user=ftn argv=/usr/lib/ifmail/ifmail -r $nexthop ($recipient)']
-
-default['postfix']['master']['bsmtp']['active'] = true
-default['postfix']['master']['bsmtp']['order'] = 560
-default['postfix']['master']['bsmtp']['type'] = 'unix'
-default['postfix']['master']['bsmtp']['unpriv'] = false
-default['postfix']['master']['bsmtp']['chroot'] = false
-default['postfix']['master']['bsmtp']['command'] = 'pipe'
-default['postfix']['master']['bsmtp']['args'] = ['flags=Fq. user=foo argv=/usr/local/sbin/bsmtp -f $sender $nexthop $recipient']
-
-# OS Aliases
-default['postfix']['aliases'] = if platform?('freebsd')
- {
- 'MAILER-DAEMON' => 'postmaster',
- 'bin' => 'root',
- 'daemon' => 'root',
- 'named' => 'root',
- 'nobody' => 'root',
- 'uucp' => 'root',
- 'www' => 'root',
- 'ftp-bugs' => 'root',
- 'postfix' => 'root',
- 'manager' => 'root',
- 'dumper' => 'root',
- 'operator' => 'root',
- 'abuse' => 'postmaster',
- }
- else
- {}
- end
-
-default['postfix']['main']['smtpd_relay_restrictions'] = "hash:#{node['postfix']['relay_restrictions_db']}, reject" if node['postfix']['use_relay_restrictions_maps']
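
Everything in this attributes file is set at `default` precedence, so a role or wrapper cookbook can override it wholesale. Below is a minimal sketch of such an override, assuming a hypothetical wrapper cookbook, that turns the node into a relayhost and enables the relay-restrictions map shown in the README example above:

```ruby
# attributes/default.rb of a hypothetical wrapper cookbook (name assumed).
# These override the default-precedence values above; the postfix recipes
# read them from the node at converge time.
default['postfix']['mail_type'] = 'master'                 # behave as a relayhost rather than a client
default['postfix']['main']['inet_interfaces'] = 'all'      # listen on all interfaces, not just loopback
default['postfix']['use_relay_restrictions_maps'] = true   # render the relay_restrictions map into main.cf
default['postfix']['relay_restrictions'] = {
  'chef.io'     => 'OK',
  '.chef.io'    => 'OK',
  'example.com' => 'OK',
}
```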
diff --git a/lc-gdn-chef/cookbooks/postfix/chefignore b/lc-gdn-chef/cookbooks/postfix/chefignore
deleted file mode 100644
index cc170ea79ed8bde58bbb77030c0c2ab70b959c21..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/postfix/chefignore
+++ /dev/null
@@ -1,115 +0,0 @@
-# Put files/directories that should be ignored in this file when uploading
-# to a Chef Infra Server or Supermarket.
-# Lines that start with '# ' are comments.
-
-# OS generated files #
-######################
-.DS_Store
-ehthumbs.db
-Icon?
-nohup.out
-Thumbs.db
-.envrc
-
-# EDITORS #
-###########
-.#*
-.project
-.settings
-*_flymake
-*_flymake.*
-*.bak
-*.sw[a-z]
-*.tmproj
-*~
-\#*
-REVISION
-TAGS*
-tmtags
-.vscode
-.editorconfig
-
-## COMPILED ##
-##############
-*.class
-*.com
-*.dll
-*.exe
-*.o
-*.pyc
-*.so
-*/rdoc/
-a.out
-mkmf.log
-
-# Testing #
-###########
-.circleci/*
-.codeclimate.yml
-.delivery/*
-.foodcritic
-.kitchen*
-.mdlrc
-.overcommit.yml
-.rspec
-.rubocop.yml
-.travis.yml
-.watchr
-.yamllint
-azure-pipelines.yml
-Dangerfile
-examples/*
-features/*
-Guardfile
-kitchen.yml*
-mlc_config.json
-Procfile
-Rakefile
-spec/*
-test/*
-
-# SCM #
-#######
-.git
-.gitattributes
-.gitconfig
-.github/*
-.gitignore
-.gitkeep
-.gitmodules
-.svn
-*/.bzr/*
-*/.git
-*/.hg/*
-*/.svn/*
-
-# Berkshelf #
-#############
-Berksfile
-Berksfile.lock
-cookbooks/*
-tmp
-
-# Bundler #
-###########
-vendor/*
-Gemfile
-Gemfile.lock
-
-# Policyfile #
-##############
-Policyfile.rb
-Policyfile.lock.json
-
-# Documentation #
-#############
-CODE_OF_CONDUCT*
-CONTRIBUTING*
-documentation/*
-TESTING*
-UPGRADING*
-
-# Vagrant #
-###########
-.vagrant
-Vagrantfile
diff --git a/lc-gdn-chef/cookbooks/postfix/kitchen.dokken.yml b/lc-gdn-chef/cookbooks/postfix/kitchen.dokken.yml
deleted file mode 100644
index 9947c080a6469e4abe27e0c69e2bc15b251ef52a..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/postfix/kitchen.dokken.yml
+++ /dev/null
@@ -1,68 +0,0 @@
----
-driver:
- name: dokken
- privileged: true # because Docker and SystemD
- chef_version: <%= ENV['CHEF_VERSION'] || 'current' %>
- env: [CHEF_LICENSE=accept]
-
-transport:
- name: dokken
-
-provisioner:
- name: dokken
-
-platforms:
- - name: almalinux-8
- driver:
- image: dokken/almalinux-8
- pid_one_command: /usr/lib/systemd/systemd
-
- - name: amazonlinux-2
- driver:
- image: dokken/amazonlinux-2
- pid_one_command: /usr/lib/systemd/systemd
-
- - name: debian-10
- driver:
- image: dokken/debian-10
- pid_one_command: /bin/systemd
-
- - name: debian-11
- driver:
- image: dokken/debian-11
- pid_one_command: /bin/systemd
-
- - name: centos-7
- driver:
- image: dokken/centos-7
- pid_one_command: /usr/lib/systemd/systemd
-
- - name: centos-stream-8
- driver:
- image: dokken/centos-stream-8
- pid_one_command: /usr/lib/systemd/systemd
-
- - name: fedora-latest
- driver:
- image: dokken/fedora-latest
- pid_one_command: /usr/lib/systemd/systemd
-
- - name: ubuntu-18.04
- driver:
- image: dokken/ubuntu-18.04
- pid_one_command: /bin/systemd
-
- - name: ubuntu-20.04
- driver:
- image: dokken/ubuntu-20.04
- pid_one_command: /bin/systemd
-
- - name: opensuse-leap-15
- driver:
- image: dokken/opensuse-leap-15
- pid_one_command: /usr/lib/systemd/systemd
-
- - name: rockylinux-8
- driver:
- image: dokken/rockylinux-8
- pid_one_command: /usr/lib/systemd/systemd
diff --git a/lc-gdn-chef/cookbooks/postfix/metadata.json b/lc-gdn-chef/cookbooks/postfix/metadata.json
deleted file mode 100644
index 45127887f2708237c038b1b758a945cfae11d934..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/postfix/metadata.json
+++ /dev/null
@@ -1,45 +0,0 @@
-{
- "name": "postfix",
- "description": "Installs and configures postfix for client or outbound relayhost, or to do SASL auth",
- "long_description": "",
- "maintainer": "Sous Chefs",
- "maintainer_email": "help@sous-chefs.org",
- "license": "Apache-2.0",
- "platforms": {
- "amazon": ">= 0.0.0",
- "centos": ">= 0.0.0",
- "debian": ">= 0.0.0",
- "fedora": ">= 0.0.0",
- "freebsd": ">= 0.0.0",
- "oracle": ">= 0.0.0",
- "redhat": ">= 0.0.0",
- "scientific": ">= 0.0.0",
- "smartos": ">= 0.0.0",
- "ubuntu": ">= 0.0.0"
- },
- "dependencies": {
-
- },
- "providing": {
-
- },
- "recipes": {
-
- },
- "version": "6.0.7",
- "source_url": "https://github.com/sous-chefs/postfix",
- "issues_url": "https://github.com/sous-chefs/postfix/issues",
- "privacy": false,
- "chef_versions": [
- [
- ">= 12.15"
- ]
- ],
- "ohai_versions": [
-
- ],
- "gems": [
-
- ],
- "eager_load_libraries": true
-}
diff --git a/lc-gdn-chef/cookbooks/postfix/metadata.rb b/lc-gdn-chef/cookbooks/postfix/metadata.rb
deleted file mode 100644
index 7a76487c3bc31d72c461c511a95069e327a26cd6..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/postfix/metadata.rb
+++ /dev/null
@@ -1,20 +0,0 @@
-name 'postfix'
-maintainer 'Sous Chefs'
-maintainer_email 'help@sous-chefs.org'
-license 'Apache-2.0'
-description 'Installs and configures postfix for client or outbound relayhost, or to do SASL auth'
-version '6.0.7'
-source_url 'https://github.com/sous-chefs/postfix'
-issues_url 'https://github.com/sous-chefs/postfix/issues'
-chef_version '>= 12.15'
-
-supports 'amazon'
-supports 'centos'
-supports 'debian'
-supports 'fedora'
-supports 'freebsd'
-supports 'oracle'
-supports 'redhat'
-supports 'scientific'
-supports 'smartos'
-supports 'ubuntu'
diff --git a/lc-gdn-chef/cookbooks/postfix/recipes/_attributes.rb b/lc-gdn-chef/cookbooks/postfix/recipes/_attributes.rb
deleted file mode 100644
index 5e7449dd98f1557f7d697ce3fd4a5356611cdb00..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/postfix/recipes/_attributes.rb
+++ /dev/null
@@ -1,53 +0,0 @@
-# Copyright:: 2012-2019, Chef Software, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-node.default_unless['postfix']['main']['mailbox_command'] = '/usr/bin/procmail -a "$EXTENSION"' if node['postfix']['use_procmail']
-
-if node['postfix']['main']['smtpd_use_tls'] == 'yes'
- node.default_unless['postfix']['main']['smtpd_tls_cert_file'] = '/etc/ssl/certs/ssl-cert-snakeoil.pem'
- node.default_unless['postfix']['main']['smtpd_tls_key_file'] = '/etc/ssl/private/ssl-cert-snakeoil.key'
- node.default_unless['postfix']['main']['smtpd_tls_CAfile'] = node['postfix']['cafile']
- node.default_unless['postfix']['main']['smtpd_tls_session_cache_database'] = 'btree:${data_directory}/smtpd_scache'
-end
-
-if node['postfix']['main']['smtp_use_tls'] == 'yes'
- node.default_unless['postfix']['main']['smtp_tls_CAfile'] = node['postfix']['cafile']
- node.default_unless['postfix']['main']['smtp_tls_session_cache_database'] = 'btree:${data_directory}/smtp_scache'
-end
-
-if node['postfix']['main']['smtp_sasl_auth_enable'] == 'yes'
- node.default_unless['postfix']['sasl_password_file'] = "#{node['postfix']['conf_dir']}/sasl_passwd"
- node.default_unless['postfix']['main']['smtp_sasl_password_maps'] = "hash:#{node['postfix']['sasl_password_file']}"
- node.default_unless['postfix']['main']['smtp_sasl_security_options'] = 'noanonymous'
- node.default_unless['postfix']['sasl']['smtp_sasl_user_name'] = ''
- node.default_unless['postfix']['sasl']['smtp_sasl_passwd'] = ''
- node.default_unless['postfix']['main']['relayhost'] = ''
-end
-
-node.default_unless['postfix']['main']['alias_maps'] = ["hash:#{node['postfix']['aliases_db']}"] if node['postfix']['use_alias_maps']
-
-node.default_unless['postfix']['main']['transport_maps'] = ["hash:#{node['postfix']['transport_db']}"] if node['postfix']['use_transport_maps']
-
-node.default_unless['postfix']['main']['access_maps'] = ["hash:#{node['postfix']['access_db']}"] if node['postfix']['use_access_maps']
-
-node.default_unless['postfix']['main']['virtual_alias_maps'] = ["#{node['postfix']['virtual_alias_db_type']}:#{node['postfix']['virtual_alias_db']}"] if node['postfix']['use_virtual_aliases']
-
-node.default_unless['postfix']['main']['virtual_alias_domains'] = ["#{node['postfix']['virtual_alias_domains_db_type']}:#{node['postfix']['virtual_alias_domains_db']}"] if node['postfix']['use_virtual_aliases_domains']
-
-node.default_unless['postfix']['main']['smtpd_relay_restrictions'] = "hash:#{node['postfix']['relay_restrictions_db']}, reject" if node['postfix']['use_relay_restrictions_maps']
-
-node.default_unless['postfix']['main']['maildrop_destination_recipient_limit'] = 1 if node['postfix']['master']['maildrop']['active']
-
-node.default_unless['postfix']['main']['cyrus_destination_recipient_limit'] = 1 if node['postfix']['master']['cyrus']['active']
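
Because this recipe only assigns values with `node.default_unless`, anything a wrapper sets before the postfix recipes run takes precedence. A short sketch, with illustrative certificate paths that are not cookbook defaults:

```ruby
# recipes/default.rb of a hypothetical wrapper cookbook (paths are illustrative).
# default_unless in postfix::_attributes only fills in values that are still
# unset, so these assignments win over the snakeoil certificate defaults.
node.default['postfix']['main']['smtpd_tls_cert_file'] = '/etc/ssl/certs/mail.example.com.pem'
node.default['postfix']['main']['smtpd_tls_key_file']  = '/etc/ssl/private/mail.example.com.key'

include_recipe 'postfix::default'
```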
diff --git a/lc-gdn-chef/cookbooks/postfix/recipes/_common.rb b/lc-gdn-chef/cookbooks/postfix/recipes/_common.rb
deleted file mode 100644
index ab3aeab632560d107c844545ed727873cdd2e26d..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/postfix/recipes/_common.rb
+++ /dev/null
@@ -1,198 +0,0 @@
-# Author:: Joshua Timberman
-# Cookbook:: postfix
-# Recipe:: _common
-#
-# Copyright:: 2009-2020, Chef Software, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-include_recipe 'postfix::_attributes'
-
-# use multi-package when we can
-if node['os'] == 'linux'
- package node['postfix']['packages']
-else
- node['postfix']['packages'].each do |pkg|
- package pkg
- end
-end
-
-package 'procmail' if node['postfix']['use_procmail']
-
-case node['platform_family']
-when 'rhel', 'fedora', 'amazon'
- service 'sendmail' do
- action :nothing
- end
-
- execute 'switch_mailer_to_postfix' do
- command '/usr/sbin/alternatives --set mta /usr/sbin/sendmail.postfix'
- notifies :stop, 'service[sendmail]'
- notifies :start, 'service[postfix]'
- not_if '/usr/bin/test /etc/alternatives/mta -ef /usr/sbin/sendmail.postfix'
- end
-when 'suse'
- file '/var/adm/postfix.configured'
-when 'omnios'
- manifest_path = ::File.join(Chef::Config[:file_cache_path], 'manifest-postfix.xml')
-
- # we need to manage the postfix group and user
- # and then subscribe to the package install because it creates a
- # postdrop group and adds postfix user to it.
- group 'postfix' do
- append true
- end
-
- user 'postfix' do
- uid node['postfix']['uid']
- gid 'postfix'
- home '/var/spool/postfix'
- subscribes :manage, 'package[postfix]'
- notifies :run, 'execute[/opt/omni/sbin/postfix set-permissions]', :immediately
- end
-
- # we don't guard this because if the user creation was successful (or happened out of band), then this won't get executed when the action is :nothing.
- execute '/opt/omni/sbin/postfix set-permissions'
-
- template manifest_path do
- source 'manifest-postfix.xml.erb'
- owner 'root'
- group node['root_group']
- mode '0644'
- notifies :run, 'execute[load postfix manifest]', :immediately
- end
-
- execute 'load postfix manifest' do
- action :nothing
- command "svccfg import #{manifest_path}"
- notifies :restart, 'service[postfix]' unless platform_family?('solaris2')
- end
-when 'freebsd'
- # Actions are based on docs provided by FreeBSD:
- # https://www.freebsd.org/doc/handbook/mail-changingmta.html
- service 'sendmail' do
- action :nothing
- end
-
- template '/etc/mail/mailer.conf' do
- source 'mailer.erb'
- owner 'root'
- group 0
- notifies :restart, 'service[postfix]' unless platform_family?('solaris2')
- end
-
- execute 'switch_mailer_to_postfix' do
- command [
- 'sysrc',
- 'sendmail_enable=NO',
- 'sendmail_submit_enable=NO',
- 'sendmail_outbound_enable=NO',
- 'sendmail_msp_queue_enable=NO',
- 'postfix_enable=YES',
- ]
- notifies :stop, 'service[sendmail]', :immediately
- notifies :disable, 'service[sendmail]', :immediately
- notifies :start, 'service[postfix]', :delayed
- only_if "sysrc sendmail_enable sendmail_submit_enable sendmail_outbound_enable sendmail_msp_queue_enable | egrep -q '(YES|unknown variable)' || sysrc postfix_enable | egrep -q '(NO|unknown variable)'"
- end
-
- execute 'disable_periodic' do
- # rubocop:disable Lint/ParenthesesAsGroupedExpression
- environment ({ 'RC_CONFS' => '/etc/periodic.conf' })
- command [
- 'sysrc',
- 'daily_clean_hoststat_enable=NO',
- 'daily_status_mail_rejects_enable=NO',
- 'daily_status_include_submit_mailq=NO',
- 'daily_submit_queuerun=NO',
- ]
- only_if "RC_CONFS=/etc/periodic.conf sysrc daily_clean_hoststat_enable daily_status_mail_rejects_enable daily_status_include_submit_mailq daily_submit_queuerun | egrep -q '(YES|unknown variable)'"
- end
-end
-
-# We need to write the config first, because the postmap commands below (notified :immediately)
-# assume the config is already correct. That is not always the case: the postfix package assumes
-# IPv6 is available, so anyone disabling IPv6 needs the config updated before those commands run.
-%w( main master ).each do |cfg|
- template "#{node['postfix']['conf_dir']}/#{cfg}.cf" do
- source "#{cfg}.cf.erb"
- owner 'root'
- group node['root_group']
- mode '0644'
- # restart service for solaris on chef-client has a bug
- # unless condition can be removed after
- # https://github.com/chef/chef/pull/6596 merge/release
- notifies :restart, 'service[postfix]' unless platform_family?('solaris2')
- variables(
- lazy { { settings: node['postfix'][cfg] } }
- )
- cookbook node['postfix']["#{cfg}_template_source"]
- end
-end
-
-execute 'update-postfix-sender_canonical' do
- command "postmap #{node['postfix']['conf_dir']}/sender_canonical"
- action :nothing
-end
-
-unless node['postfix']['sender_canonical_map_entries'].empty?
- template "#{node['postfix']['conf_dir']}/sender_canonical" do
- owner 'root'
- group node['root_group']
- mode '0644'
- notifies :run, 'execute[update-postfix-sender_canonical]', :immediately
- notifies :reload, 'service[postfix]'
- end
-
- node.default['postfix']['main']['sender_canonical_maps'] = "hash:#{node['postfix']['conf_dir']}/sender_canonical" unless node['postfix']['main'].key?('sender_canonical_maps')
-end
-
-execute 'update-postfix-smtp_generic' do
- command "postmap #{node['postfix']['conf_dir']}/smtp_generic"
- action :nothing
-end
-
-unless node['postfix']['smtp_generic_map_entries'].empty?
- template "#{node['postfix']['conf_dir']}/smtp_generic" do
- owner 'root'
- group node['root_group']
- mode '0644'
- notifies :run, 'execute[update-postfix-smtp_generic]', :immediately
- notifies :reload, 'service[postfix]'
- end
-
- node.default['postfix']['main']['smtp_generic_maps'] = "hash:#{node['postfix']['conf_dir']}/smtp_generic" unless node['postfix']['main'].key?('smtp_generic_maps')
-end
-
-execute 'update-postfix-recipient_canonical' do
- command "postmap #{node['postfix']['conf_dir']}/recipient_canonical"
- action :nothing
-end
-
-unless node['postfix']['recipient_canonical_map_entries'].empty?
- template "#{node['postfix']['conf_dir']}/recipient_canonical" do
- owner 'root'
- group node['root_group']
- mode '0644'
- notifies :run, 'execute[update-postfix-recipient_canonical]', :immediately
- notifies :reload, 'service[postfix]'
- end
-
- node.default['postfix']['main']['recipient_canonical_maps'] = "hash:#{node['postfix']['conf_dir']}/recipient_canonical" unless node['postfix']['main'].key?('recipient_canonical_maps')
-end
-
-service 'postfix' do
- supports status: true, restart: true, reload: true
- action [:enable, :start]
-end
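
`_common.rb` looks up the cookbook that provides `main.cf.erb` and `master.cf.erb` through `node['postfix']['main_template_source']` and `master_template_source`, so a wrapper can ship its own templates without forking the cookbook. A sketch, assuming a wrapper cookbook named `my_postfix` that bundles its own `templates/main.cf.erb` and `templates/master.cf.erb`:

```ruby
# recipes/default.rb of a hypothetical 'my_postfix' wrapper cookbook.
# Point the template lookup at this wrapper so _common.rb renders
# my_postfix/templates/main.cf.erb and master.cf.erb instead of the
# copies shipped with the postfix cookbook.
node.default['postfix']['main_template_source']   = 'my_postfix'
node.default['postfix']['master_template_source'] = 'my_postfix'

include_recipe 'postfix::default'
```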
diff --git a/lc-gdn-chef/cookbooks/postfix/recipes/access.rb b/lc-gdn-chef/cookbooks/postfix/recipes/access.rb
deleted file mode 100644
index cfe5dbd941c4e7ced8c206f3b181e070bd4b4b28..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/postfix/recipes/access.rb
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright:: 2012-2019, Chef Software, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-include_recipe 'postfix::_common'
-
-execute 'update-postfix-access' do
- command "postmap #{node['postfix']['access_db']}"
- environment PATH: "#{ENV['PATH']}:/opt/omni/bin:/opt/omni/sbin" if platform_family?('omnios')
- action :nothing
-end
-
-template node['postfix']['access_db'] do
- source 'access.erb'
- notifies :run, 'execute[update-postfix-access]', :immediately
-end
diff --git a/lc-gdn-chef/cookbooks/postfix/recipes/aliases.rb b/lc-gdn-chef/cookbooks/postfix/recipes/aliases.rb
deleted file mode 100644
index f8eec97512092671e0038b3fa112e39e01e27d26..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/postfix/recipes/aliases.rb
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright:: 2012-2019, Chef Software, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-include_recipe 'postfix::_common'
-
-execute 'update-postfix-aliases' do
- command 'newaliases'
- environment PATH: "#{ENV['PATH']}:/opt/omni/bin:/opt/omni/sbin" if platform_family?('omnios')
- # On FreeBSD, /usr/sbin/newaliases is the sendmail command, and it's in the path before postfix's /usr/local/bin/newaliases
- environment('PATH' => "/usr/local/bin:#{ENV['PATH']}") if platform_family?('freebsd')
- action :nothing
-end
-
-template node['postfix']['aliases_db'] do
- source 'aliases.erb'
- notifies :run, 'execute[update-postfix-aliases]', :immediately
-end
diff --git a/lc-gdn-chef/cookbooks/postfix/recipes/client.rb b/lc-gdn-chef/cookbooks/postfix/recipes/client.rb
deleted file mode 100644
index b5009a68d70fe99a6dcf48f23b145ea190e28e2e..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/postfix/recipes/client.rb
+++ /dev/null
@@ -1,44 +0,0 @@
-# Author:: Joshua Timberman
-# Cookbook:: postfix
-# Recipe:: client
-#
-# Copyright:: 2009-2019, Chef Software, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-if Chef::Config[:solo]
- Chef::Log.info("#{cookbook_name}::#{recipe_name} is intended for use with Chef Server, use #{cookbook_name}::default with Chef Solo.")
- return
-end
-
-query = "role:#{node['postfix']['relayhost_role']}"
-relayhost = ''
-# if the relayhost_port attribute is not port 25, append to the relayhost
-relayhost_port = node['postfix']['relayhost_port'].to_s != '25' ? ":#{node['postfix']['relayhost_port']}" : ''
-
-# results = []
-
-if node.run_list.roles.include?(node['postfix']['relayhost_role'])
- relayhost << node['ipaddress']
-elsif node['postfix']['multi_environment_relay']
- results = search(:node, query)
- relayhost = results.map { |n| n['ipaddress'] }.first
-else
- results = search(:node, "#{query} AND chef_environment:#{node.chef_environment}")
- relayhost = results.map { |n| n['ipaddress'] }.first
-end
-
-node.default['postfix']['main']['relayhost'] = "[#{relayhost}]#{relayhost_port}"
-
-include_recipe 'postfix'
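
The recipe above finds the relayhost by searching for nodes that carry the role named in `node['postfix']['relayhost_role']` (default `relayhost`). A sketch of a matching role in the Chef role Ruby DSL; the run list here is illustrative:

```ruby
# roles/relayhost.rb -- the role postfix::client searches for (name matches
# the default node['postfix']['relayhost_role']).
name 'relayhost'
description 'Outbound mail relay discovered by postfix::client via Chef search'
run_list 'recipe[postfix::server]'
```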
diff --git a/lc-gdn-chef/cookbooks/postfix/recipes/default.rb b/lc-gdn-chef/cookbooks/postfix/recipes/default.rb
deleted file mode 100644
index 9fed0be8801042789fadc2ffb25b68122d302d3d..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/postfix/recipes/default.rb
+++ /dev/null
@@ -1,36 +0,0 @@
-# Author:: Joshua Timberman
-# Cookbook:: postfix
-# Recipe:: default
-#
-# Copyright:: 2009-2019, Chef Software, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-include_recipe 'postfix::_common'
-
-include_recipe 'postfix::sasl_auth' if node['postfix']['main']['smtp_sasl_auth_enable'] == 'yes'
-
-include_recipe 'postfix::aliases' if node['postfix']['use_alias_maps']
-
-include_recipe 'postfix::transports' if node['postfix']['use_transport_maps']
-
-include_recipe 'postfix::access' if node['postfix']['use_access_maps']
-
-include_recipe 'postfix::virtual_aliases' if node['postfix']['use_virtual_aliases']
-
-include_recipe 'postfix::virtual_aliases_domains' if node['postfix']['use_virtual_aliases_domains']
-
-include_recipe 'postfix::relay_restrictions' if node['postfix']['use_relay_restrictions_maps']
-
-include_recipe 'postfix::maps' if node['postfix']['maps']
diff --git a/lc-gdn-chef/cookbooks/postfix/recipes/maps.rb b/lc-gdn-chef/cookbooks/postfix/recipes/maps.rb
deleted file mode 100644
index a1b612fda98a8adc7ff8b1d21fb05a5d03fd8e33..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/postfix/recipes/maps.rb
+++ /dev/null
@@ -1,44 +0,0 @@
-# Copyright:: 2012-2019, Chef Software, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-node['postfix']['maps'].each do |type, maps|
- if platform_family?('debian')
- package "postfix-#{type}" if %w(pgsql mysql ldap cdb).include?(type)
- end
-
- separator = if %w(pgsql mysql ldap memcache sqlite).include?(type)
- ' = '
- else
- ' '
- end
- maps.each do |file, content|
- execute "update-postmap-#{file}" do
- command "postmap #{file}"
- environment PATH: "#{ENV['PATH']}:/opt/omni/bin:/opt/omni/sbin" if platform_family?('omnios')
- action :nothing
- end if %w(btree cdb dbm hash sdbm).include?(type)
- template "#{file}-#{type}" do
- path file
- source 'maps.erb'
- only_if "postconf -m | grep -q #{type}"
- variables(
- map: content,
- separator: separator
- )
- notifies :run, "execute[update-postmap-#{file}]" if %w(btree cdb dbm hash sdbm).include?(type)
- notifies :restart, 'service[postfix]'
- end
- end
-end
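
This recipe expects `node['postfix']['maps']` to be a nested hash of map type, then file path, then key/value entries; indexed map types additionally get `postmap` run against the rendered file. A hedged example of that attribute shape (paths and entries are illustrative, not cookbook defaults):

```ruby
# Example shape of node['postfix']['maps'] consumed by postfix::maps.
default['postfix']['maps'] = {
  # indexed map types (btree, cdb, dbm, hash, sdbm) are rendered as
  # "key value" lines and rebuilt with postmap afterwards
  'hash' => {
    '/etc/postfix/transport' => {
      'example.com' => 'smtp:[mail.example.com]:25',
    },
  },
  # pgsql/mysql/ldap/memcache/sqlite maps are rendered as "key = value" lines
  'pgsql' => {
    '/etc/postfix/pgsql-virtual.cf' => {
      'hosts'  => 'db.example.com',
      'user'   => 'postfix',
      'dbname' => 'mail',
    },
  },
}
```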
diff --git a/lc-gdn-chef/cookbooks/postfix/recipes/relay_restrictions.rb b/lc-gdn-chef/cookbooks/postfix/recipes/relay_restrictions.rb
deleted file mode 100644
index 08aabcd81223d4b668a00b12abff0ea6234dc1c1..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/postfix/recipes/relay_restrictions.rb
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright:: 2012-2019, Chef Software, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-include_recipe 'postfix::_common'
-
-postmap_command = platform_family?('rhel') ? '/usr/sbin/postmap' : 'postmap'
-
-execute 'update-postfix-relay-restrictions' do
- command "#{postmap_command} #{node['postfix']['relay_restrictions_db']}"
- environment PATH: "#{ENV['PATH']}:/opt/omni/bin:/opt/omni/sbin" if platform_family?('omnios')
- action :nothing
-end
-
-template node['postfix']['relay_restrictions_db'] do
- source 'relay_restrictions.erb'
- notifies :run, 'execute[update-postfix-relay-restrictions]', :immediately
-end
diff --git a/lc-gdn-chef/cookbooks/postfix/recipes/sasl_auth.rb b/lc-gdn-chef/cookbooks/postfix/recipes/sasl_auth.rb
deleted file mode 100644
index bd9c18f59ce40d7238bd9ad4d7e963c9e2740934..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/postfix/recipes/sasl_auth.rb
+++ /dev/null
@@ -1,57 +0,0 @@
-#
-# Author:: Joshua Timberman
-# Cookbook:: postfix
-# Recipe:: sasl_auth
-#
-# Copyright:: 2009-2019, Chef Software, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-include_recipe 'postfix::_common'
-
-sasl_pkgs = []
-
-# We use case instead of value_for_platform_family because we need
-# version specifics for RHEL.
-case node['platform_family']
-when 'debian'
- sasl_pkgs = %w(libsasl2-2 libsasl2-modules ca-certificates)
-when 'rhel'
- sasl_pkgs = %w(cyrus-sasl cyrus-sasl-plain ca-certificates)
-when 'amazon'
- sasl_pkgs = %w(cyrus-sasl cyrus-sasl-plain ca-certificates)
-when 'fedora'
- sasl_pkgs = %w(cyrus-sasl cyrus-sasl-plain ca-certificates)
-end
-
-sasl_pkgs.each do |pkg|
- package pkg
-end
-
-execute 'postmap-sasl_passwd' do
- command "postmap #{node['postfix']['sasl_password_file']}"
- environment 'PATH' => "#{ENV['PATH']}:/opt/omni/bin:/opt/omni/sbin" if platform_family?('omnios')
- action :nothing
-end
-
-template node['postfix']['sasl_password_file'] do
- sensitive true
- source 'sasl_passwd.erb'
- owner 'root'
- group node['root_group']
- mode '400'
- notifies :run, 'execute[postmap-sasl_passwd]', :immediately
- notifies :restart, 'service[postfix]'
- variables(settings: node['postfix']['sasl'])
-end
diff --git a/lc-gdn-chef/cookbooks/postfix/recipes/server.rb b/lc-gdn-chef/cookbooks/postfix/recipes/server.rb
deleted file mode 100644
index eaa14a60d4ce3d1463d5965182197b5e80bbf7c1..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/postfix/recipes/server.rb
+++ /dev/null
@@ -1,24 +0,0 @@
-#
-# Author:: Joshua Timberman
-# Cookbook:: postfix
-# Recipe:: server
-#
-# Copyright:: 2009-2019, Chef Software, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-node.override['postfix']['mail_type'] = 'master'
-node.override['postfix']['main']['inet_interfaces'] = 'all'
-
-include_recipe 'postfix'
diff --git a/lc-gdn-chef/cookbooks/postfix/recipes/transports.rb b/lc-gdn-chef/cookbooks/postfix/recipes/transports.rb
deleted file mode 100644
index 5436ae61bb9037cfab0f4c79b0571da4f7e19409..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/postfix/recipes/transports.rb
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright:: 2012-2019, Chef Software, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-include_recipe 'postfix::_common'
-
-postmap_command = platform_family?('rhel') ? '/usr/sbin/postmap' : 'postmap'
-
-execute 'update-postfix-transport' do
- command "#{postmap_command} #{node['postfix']['transport_db']}"
- environment PATH: "#{ENV['PATH']}:/opt/omni/bin:/opt/omni/sbin" if platform_family?('omnios')
- action :nothing
-end
-
-template node['postfix']['transport_db'] do
- source 'transport.erb'
- notifies :run, 'execute[update-postfix-transport]', :immediately
-end
diff --git a/lc-gdn-chef/cookbooks/postfix/recipes/virtual_aliases.rb b/lc-gdn-chef/cookbooks/postfix/recipes/virtual_aliases.rb
deleted file mode 100644
index 704780720deefe543a17566d47b581d547c193d0..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/postfix/recipes/virtual_aliases.rb
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright:: 2012-2019, Chef Software, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-include_recipe 'postfix::_common'
-
-execute 'update-postfix-virtual-alias' do
- command "postmap #{node['postfix']['virtual_alias_db']}"
- environment PATH: "#{ENV['PATH']}:/opt/omni/bin:/opt/omni/sbin" if platform_family?('omnios')
- action :nothing
-end
-
-template node['postfix']['virtual_alias_db'] do
- source 'virtual_aliases.erb'
- notifies :run, 'execute[update-postfix-virtual-alias]', :immediately
- notifies :restart, 'service[postfix]'
-end
diff --git a/lc-gdn-chef/cookbooks/postfix/recipes/virtual_aliases_domains.rb b/lc-gdn-chef/cookbooks/postfix/recipes/virtual_aliases_domains.rb
deleted file mode 100644
index 3ded82df833f25bfefeae3cd7e8da93af4ae06e8..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/postfix/recipes/virtual_aliases_domains.rb
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright:: 2012-2019, Chef Software, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-include_recipe 'postfix::_common'
-
-execute 'update-postfix-virtual-alias-domains' do
- command "postmap #{node['postfix']['virtual_alias_domains_db']}"
- environment PATH: "#{ENV['PATH']}:/opt/omni/bin:/opt/omni/sbin" if platform_family?('omnios')
- action :nothing
-end
-
-template node['postfix']['virtual_alias_domains_db'] do
- source 'virtual_aliases_domains.erb'
- notifies :run, 'execute[update-postfix-virtual-alias-domains]', :immediately
- notifies :restart, 'service[postfix]'
-end
diff --git a/lc-gdn-chef/cookbooks/postfix/templates/access.erb b/lc-gdn-chef/cookbooks/postfix/templates/access.erb
deleted file mode 100644
index d054fa93932e3bbc8b79802c4818982ab1d4066a..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/postfix/templates/access.erb
+++ /dev/null
@@ -1,9 +0,0 @@
-#
-# Auto-generated by Chef. Do not hand edit!
-# Local modifications will be overwritten.
-#
-# See man 5 access for format
-
-<% node['postfix']['access'].each do |name, value| %>
-<%= name %> <%= value %>
-<% end unless node['postfix']['access'].nil? %>
diff --git a/lc-gdn-chef/cookbooks/postfix/templates/aliases.erb b/lc-gdn-chef/cookbooks/postfix/templates/aliases.erb
deleted file mode 100644
index 3093154d2c8667fb7797cefcfe3b0a699c615808..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/postfix/templates/aliases.erb
+++ /dev/null
@@ -1,10 +0,0 @@
-#
-# Auto-generated by Chef.
-# Local modifications will be overwritten.
-#
-# See man 5 aliases for format
-postmaster: root
-
-<% node['postfix']['aliases'].each do |name, value| %>
-<%= name %>: <%= [value].flatten.map{|x| if (x.include?("@")) then x else %Q("#{x}") end}.join(', ') %>
-<% end unless node['postfix']['aliases'].nil? %>
diff --git a/lc-gdn-chef/cookbooks/postfix/templates/mailer.erb b/lc-gdn-chef/cookbooks/postfix/templates/mailer.erb
deleted file mode 100644
index 186515785ad09288d3fbec1f46ebf1f9aaf20364..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/postfix/templates/mailer.erb
+++ /dev/null
@@ -1,10 +0,0 @@
-#
-# Auto-generated by Chef.
-# Local modifications will be overwritten.
-#
-# Execute the Postfix sendmail program, named /usr/local/sbin/sendmail
-#
-sendmail /usr/local/sbin/sendmail
-send-mail /usr/local/sbin/sendmail
-mailq /usr/local/sbin/sendmail
-newaliases /usr/local/sbin/sendmail
diff --git a/lc-gdn-chef/cookbooks/postfix/templates/main.cf.erb b/lc-gdn-chef/cookbooks/postfix/templates/main.cf.erb
deleted file mode 100644
index 57067d2fcdc588c29d19e98469789c41d0fedb10..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/postfix/templates/main.cf.erb
+++ /dev/null
@@ -1,14 +0,0 @@
-###
-# Auto-generated by Chef.
-# Local modifications will be overwritten.
-# Configured as <%= node['postfix']['mail_type'] %>
-###
-
-<% @settings.sort.map do |key, value| -%>
-<% next if value.nil? -%>
-<% if value.kind_of? Array -%>
-<%= "#{key} = #{value.join(', ')}"%>
-<% else -%>
-<%= "#{key} = #{value}"%>
-<% end -%>
-<% end -%>
diff --git a/lc-gdn-chef/cookbooks/postfix/templates/manifest-postfix.xml.erb b/lc-gdn-chef/cookbooks/postfix/templates/manifest-postfix.xml.erb
deleted file mode 100644
index 8a1a77a84f790af66f532ca63ca1b857f3b7b108..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/postfix/templates/manifest-postfix.xml.erb
+++ /dev/null
@@ -1,84 +0,0 @@
-<!-- SMF service manifest imported via svccfg; defines the "Postfix Mailserver" service -->
diff --git a/lc-gdn-chef/cookbooks/postfix/templates/maps.erb b/lc-gdn-chef/cookbooks/postfix/templates/maps.erb
deleted file mode 100644
index a351e82367e60252388f11c000c7da1523df6510..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/postfix/templates/maps.erb
+++ /dev/null
@@ -1,8 +0,0 @@
-#
-# This file is generated by Chef.
-# Local changes will be overwritten
-#
-
-<% @map.each do |key, value| -%>
-<%= key %><%= @separator %><%= value %>
-<% end unless @map.nil? -%>
diff --git a/lc-gdn-chef/cookbooks/postfix/templates/master.cf.erb b/lc-gdn-chef/cookbooks/postfix/templates/master.cf.erb
deleted file mode 100644
index fb37a6b820ada246917f7a148ac2d8c265503d43..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/postfix/templates/master.cf.erb
+++ /dev/null
@@ -1,67 +0,0 @@
-# This file is generated by Chef.
-# Local changes will be overwritten
-#
-# Postfix master process configuration file. For details on the format
-# of the file, see the master(5) manual page (command: "man 5 master").
-#
-# ==========================================================================
-# service type private unpriv chroot wakeup maxproc command + args
-# (yes) (yes) (yes) (never) (100)
-# ==========================================================================
-<% @settings.sort_by{|k,v| v['order']}.map do |service, properties| -%>
-<% next if !properties['active'] -%>
-<% if properties.has_key?('comment') -%>
-#
-#<%= properties['comment'] %>
-<% end -%>
-<% if properties.has_key?('service') -%>
-<%= properties['service'].ljust(10) -%>
-<% else -%>
-<%= service.ljust(10) -%>
-<% end -%>
- <%= properties['type'].ljust(6) -%>
-<% if properties.has_key?('private') -%>
-<% if properties['private'] -%>
-<% priv='y' -%>
-<% else -%>
-<% priv='n' -%>
-<% end -%>
-<% else -%>
-<% priv='-' -%>
-<% end -%>
-<%= priv.ljust(8) -%>
-<% if properties.has_key?('unpriv') -%>
-<% if properties['unpriv'] -%>
-<% unpriv='y' -%>
-<% else -%>
-<% unpriv='n' -%>
-<% end -%>
-<% else -%>
-<% unpriv='-' -%>
-<% end -%>
-<%= unpriv.ljust(8) -%>
-<% if properties.has_key?('chroot') -%>
-<% if properties['chroot'] -%>
-<% chroot='y' -%>
-<% else -%>
-<% chroot='n' -%>
-<% end -%>
-<% else -%>
-<% chroot='-' -%>
-<% end -%>
-<%= chroot.ljust(7) -%>
-<% if properties.has_key?('wakeup') -%>
- <%= properties['wakeup'].ljust(7) -%>
-<% else -%>
- <%= '-'.ljust(7) -%>
-<% end -%>
-<% if properties.has_key?('maxproc') -%>
- <%= properties['maxproc'].ljust(7) -%>
-<% else -%>
- <%= '-'.ljust(7) -%>
-<% end -%>
- <%= properties['command'] %>
-<% properties['args'].each do |arg| -%>
- <%= arg %>
-<% end -%>
-<% end -%>
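
The deleted `master.cf.erb` template builds each fixed-width master.cf entry from a per-service properties hash, left-justifying each column and emitting `-` for "use the Postfix default". The following standalone Ruby sketch mirrors that column logic for one made-up service entry (hash keys follow the template; values are illustrative):

```ruby
# Illustrative per-service properties; keys mirror what the template expects.
properties = {
  'type'    => 'inet',
  'private' => false,
  'chroot'  => false,
  'command' => 'smtpd',
  'args'    => ['-o content_filter=']
}

# Boolean columns render as y/n, or '-' when the key is unset (Postfix default).
bool_col = lambda do |props, key|
  return '-' unless props.key?(key)
  props[key] ? 'y' : 'n'
end

line  = 'smtp'.ljust(10)                               # service name column
line += ' ' + properties['type'].ljust(6)              # type
line += bool_col.call(properties, 'private').ljust(8)  # private
line += bool_col.call(properties, 'unpriv').ljust(8)   # unpriv
line += bool_col.call(properties, 'chroot').ljust(7)   # chroot
line += ' ' + (properties['wakeup']  || '-').ljust(7)  # wakeup
line += ' ' + (properties['maxproc'] || '-').ljust(7)  # maxproc
line += ' ' + properties['command']

puts line                                          # one aligned master.cf entry
properties['args'].each { |arg| puts " #{arg}" }   # its indented arguments
```
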
diff --git a/lc-gdn-chef/cookbooks/postfix/templates/port_smtp.erb b/lc-gdn-chef/cookbooks/postfix/templates/port_smtp.erb
deleted file mode 100644
index eb6aa804642698190e167dba109c73e07cdc3434..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/postfix/templates/port_smtp.erb
+++ /dev/null
@@ -1,2 +0,0 @@
-# SMTP
--A FWR -p tcp -m tcp --dport 25 -j ACCEPT
diff --git a/lc-gdn-chef/cookbooks/postfix/templates/recipient_canonical.erb b/lc-gdn-chef/cookbooks/postfix/templates/recipient_canonical.erb
deleted file mode 100644
index bfff5c16b3858caf9057dc7dbd46889c1aeff556..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/postfix/templates/recipient_canonical.erb
+++ /dev/null
@@ -1,9 +0,0 @@
-#
-# Auto-generated by Chef.
-# Local modifications will be overwritten.
-#
-# See man 5 canonical for format
-
-<% node['postfix']['recipient_canonical_map_entries'].each do |name, value| %>
-<%= name %> <%= value %>
-<% end unless node['postfix']['recipient_canonical_map_entries'].nil? %>
diff --git a/lc-gdn-chef/cookbooks/postfix/templates/relay_restrictions.erb b/lc-gdn-chef/cookbooks/postfix/templates/relay_restrictions.erb
deleted file mode 100644
index 85a9edaaa15dabc43578edf0c5c3ace656796230..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/postfix/templates/relay_restrictions.erb
+++ /dev/null
@@ -1,10 +0,0 @@
-#
-# Auto-generated by Chef.
-# Local modifications will be overwritten.
-#
-# The attribute name is the domain name; the attribute value is either OK or REJECT
-
-<% node['postfix']['relay_restrictions'].each do |name, value| %>
-<%= name %> <%= value %>
-<% end unless node['postfix']['relay_restrictions'].nil? %>
-* REJECT
diff --git a/lc-gdn-chef/cookbooks/postfix/templates/sasl_passwd.erb b/lc-gdn-chef/cookbooks/postfix/templates/sasl_passwd.erb
deleted file mode 100644
index 7ead10eb1ca7b0706ba8d16a97341be36624c18a..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/postfix/templates/sasl_passwd.erb
+++ /dev/null
@@ -1,8 +0,0 @@
-# Auto-generated by Chef.
-# Local modifications will be overwritten.
-
-<% if !@settings.nil? && !@settings.empty? -%>
-<% @settings.sort.map do |relayhost,value| -%>
-<%= relayhost %> <%= value['username'] %>:<%= value['password'] %>
-<% end -%>
-<% end -%>
diff --git a/lc-gdn-chef/cookbooks/postfix/templates/sender_canonical.erb b/lc-gdn-chef/cookbooks/postfix/templates/sender_canonical.erb
deleted file mode 100644
index 3eb25ef8b0b20ea55492ff9d4d2ee530755f6cc3..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/postfix/templates/sender_canonical.erb
+++ /dev/null
@@ -1,9 +0,0 @@
-#
-# Auto-generated by Chef.
-# Local modifications will be overwritten.
-#
-# See man 5 canonical for format
-
-<% node['postfix']['sender_canonical_map_entries'].each do |name, value| %>
-<%= name %> <%= value %>
-<% end unless node['postfix']['sender_canonical_map_entries'].nil? %>
diff --git a/lc-gdn-chef/cookbooks/postfix/templates/smtp_generic.erb b/lc-gdn-chef/cookbooks/postfix/templates/smtp_generic.erb
deleted file mode 100644
index 47b36d2d1053e87743535523fe972fb52a147e83..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/postfix/templates/smtp_generic.erb
+++ /dev/null
@@ -1,9 +0,0 @@
-#
-# Auto-generated by Chef.
-# Local modifications will be overwritten.
-#
-# See man 5 generic for format
-
-<% node['postfix']['smtp_generic_map_entries'].each do |name, value| %>
-<%= name %> <%= value %>
-<% end unless node['postfix']['smtp_generic_map_entries'].nil? %>
diff --git a/lc-gdn-chef/cookbooks/postfix/templates/transport.erb b/lc-gdn-chef/cookbooks/postfix/templates/transport.erb
deleted file mode 100644
index df31153e556a7ccd8a4a3520ff43e5dbd1e2fa44..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/postfix/templates/transport.erb
+++ /dev/null
@@ -1,9 +0,0 @@
-#
-# Auto-generated by Chef.
-# Local modifications will be overwritten.
-#
-# See man 5 transport for format
-
-<% node['postfix']['transports'].each do |name, value| %>
-<%= name %> <%= value %>
-<% end unless node['postfix']['transports'].nil? %>
diff --git a/lc-gdn-chef/cookbooks/postfix/templates/virtual_aliases.erb b/lc-gdn-chef/cookbooks/postfix/templates/virtual_aliases.erb
deleted file mode 100644
index d1bfadfa740014d5ad63e509ce1c82ac5f6f7d7f..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/postfix/templates/virtual_aliases.erb
+++ /dev/null
@@ -1,9 +0,0 @@
-#
-# Auto-generated by Chef.
-# Local modifications will be overwritten.
-#
-# See man 5 virtual for format
-
-<% node['postfix']['virtual_aliases'].each do |key, value| %>
-<%= key %> <%= value %>
-<% end unless node['postfix']['virtual_aliases'].nil? %>
diff --git a/lc-gdn-chef/cookbooks/postfix/templates/virtual_aliases_domains.erb b/lc-gdn-chef/cookbooks/postfix/templates/virtual_aliases_domains.erb
deleted file mode 100644
index d4334b2c9f12f374a474c32749ad44fceee0bb6b..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/postfix/templates/virtual_aliases_domains.erb
+++ /dev/null
@@ -1,9 +0,0 @@
-#
-# Auto-generated by Chef.
-# Local modifications will be overwritten.
-#
-# See man 5 virtual for format
-
-<% node['postfix']['virtual_aliases_domains'].each do |key, value| %>
-<%= key %> <%= value %>
-<% end unless node['postfix']['virtual_aliases_domains'].nil? %>
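
All of the canonical/transport/virtual map templates deleted above share one pattern: iterate a node-attribute hash and emit `name value` lines suitable for `postmap`. The recipes that render them are not included in this diff, so the following is only an assumed sketch of typical wiring (resource names, paths, and the notification are illustrative, not the cookbook's actual recipe code):

```ruby
# Assumed wiring for one of the map templates (path and notification are illustrative).
template '/etc/postfix/transport' do
  source 'transport.erb'        # one of the deleted templates above
  owner  'root'
  group  'root'
  mode   '0644'
  notifies :run, 'execute[postmap transport]', :immediately
end

# Rebuild the hashed map whenever the flat file changes.
execute 'postmap transport' do
  command 'postmap /etc/postfix/transport'
  action  :nothing
end
```
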
diff --git a/lc-gdn-chef/cookbooks/prometheus/.foodcritic b/lc-gdn-chef/cookbooks/prometheus/.foodcritic
deleted file mode 100644
index 847f0cb63723a093a615d16c6b51321d19432fed..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/.foodcritic
+++ /dev/null
@@ -1 +0,0 @@
-~FC033
diff --git a/lc-gdn-chef/cookbooks/prometheus/.gitignore b/lc-gdn-chef/cookbooks/prometheus/.gitignore
deleted file mode 100644
index 024f1e115ac44d722f0e8dccf8d54fcc1288e89b..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/.gitignore
+++ /dev/null
@@ -1,21 +0,0 @@
-*~
-*#
-.#*
-\#*#
-.*.sw[a-z]
-*.un~
-pkg/
-
-# Berkshelf
-.vagrant
-/cookbooks
-Berksfile.lock
-
-# Bundler
-Gemfile.lock
-bin/*
-.bundle/*
-
-.kitchen/
-.kitchen.local.yml
-.idea
diff --git a/lc-gdn-chef/cookbooks/prometheus/.hound.yml b/lc-gdn-chef/cookbooks/prometheus/.hound.yml
deleted file mode 100644
index eddba4fa0b5467b41ca465f291e90f723ec93b05..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/.hound.yml
+++ /dev/null
@@ -1,8 +0,0 @@
-ruby:
- config_file: .rubocop.yml
-
-java_script:
- enabled: false
-
-scss:
- enabled: false
diff --git a/lc-gdn-chef/cookbooks/prometheus/.kitchen.dokken.yml b/lc-gdn-chef/cookbooks/prometheus/.kitchen.dokken.yml
deleted file mode 100644
index 561d4bafee1b07b0d615df499649aa04395f6ca8..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/.kitchen.dokken.yml
+++ /dev/null
@@ -1,93 +0,0 @@
-driver:
- name: dokken
- privileged: true # because Docker and SystemD/Upstart
- chef_version: 12.21.31
- #chef_version: <%= ENV['CHEF_VERSION'] || 'current' %>
-
-transport:
- name: dokken
-
-provisioner:
- name: dokken
- data_path: test/shared
-
-verifier:
- sudo: false
-
-platforms:
- - name: ubuntu-14.04
- - name: ubuntu-16.04
- driver:
- image: dokken/ubuntu-16.04
- pid_one_command: /bin/systemd
- intermediate_instructions:
- - RUN /usr/bin/apt-get update
- - RUN /usr/bin/apt install sudo
- # - name: debian-7.11
- # driver:
- # cache_directory: false
- - name: debian-8.7
- driver:
- image: dokken/debian-8
- pid_one_command: /bin/systemd
- - name: centos-6.9
- run_list:
- - recipe[yum::default]
- - name: centos-7.3
- run_list:
- - recipe[yum::default]
-
-suites:
- - name: default
- run_list:
- - recipe[prometheus::default]
- - recipe[prometheus::alertmanager]
- attributes:
- prometheus:
- install_method: 'binary'
- alertmanager:
- install_method: 'binary'
-
- - name: init-binary
- run_list:
- - recipe[prometheus::default]
- attributes:
- prometheus:
- init_style: 'init'
- install_method: 'binary'
- flags:
- storage.remote.timeout: 45s
-
- - name: default-source
- run_list:
- - recipe[prometheus::default]
- attributes:
- go:
- gobin: '/usr/local/go/bin'
- gopath: '/usr/local/go'
-
- - name: init
- run_list:
- - recipe[prometheus::default]
- provisioner:
- name: chef_zero
- require_chef_omnibus: 11.12.4
- attributes:
- prometheus:
- init_style: 'init'
-
-# - name: bluepill
-# run_list:
-# - recipe[prometheus::default]
-# attributes:
-# prometheus:
-# init_style: 'bluepill'
-
-# - name: bluepill-binary
-# run_list:
-# - recipe[prometheus::default]
-# attributes:
-# prometheus:
-# init_style: 'bluepill'
-# install_method: 'binary'
-
diff --git a/lc-gdn-chef/cookbooks/prometheus/.kitchen.yml b/lc-gdn-chef/cookbooks/prometheus/.kitchen.yml
deleted file mode 100644
index e642e733c4b46560aa246b6095bca01eece82c66..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/.kitchen.yml
+++ /dev/null
@@ -1,75 +0,0 @@
-driver:
- name: vagrant
-
-provisioner:
- name: chef_zero
- data_path: test/shared
-
-platforms:
- - name: ubuntu-14.04
- - name: ubuntu-16.04
- - name: debian-7.11
- driver:
- cache_directory: false
- - name: debian-8.7
- - name: centos-6.9
- run_list:
- - recipe[yum::default]
- - name: centos-7.3
- run_list:
- - recipe[yum::default]
-
-suites:
- - name: default
- run_list:
- - recipe[prometheus::default]
- - recipe[prometheus::alertmanager]
- attributes:
- prometheus:
- install_method: 'binary'
- alertmanager:
- install_method: 'binary'
-
- - name: init-binary
- run_list:
- - recipe[prometheus::default]
- attributes:
- prometheus:
- init_style: 'init'
- install_method: 'binary'
- flags:
- storage.remote.timeout: 45s
-
- - name: default-source
- run_list:
- - recipe[prometheus::default]
- attributes:
- go:
- gobin: '/usr/local/go/bin'
- gopath: '/usr/local/go'
-
- - name: init
- run_list:
- - recipe[prometheus::default]
- provisioner:
- name: chef_zero
- require_chef_omnibus: 11.12.4
- attributes:
- prometheus:
- init_style: 'init'
-
-# - name: bluepill
-# run_list:
-# - recipe[prometheus::default]
-# attributes:
-# prometheus:
-# init_style: 'bluepill'
-
-# - name: bluepill-binary
-# run_list:
-# - recipe[prometheus::default]
-# attributes:
-# prometheus:
-# init_style: 'bluepill'
-# install_method: 'binary'
-
diff --git a/lc-gdn-chef/cookbooks/prometheus/.rubocop.yml b/lc-gdn-chef/cookbooks/prometheus/.rubocop.yml
deleted file mode 100644
index 92b417ea8ca072229f16fe39e42b493242ed4933..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/.rubocop.yml
+++ /dev/null
@@ -1,37 +0,0 @@
-AllCops:
- Exclude:
- - vendor/**/*
- - .kitchen/**/*
- - Vagrantfile
-
-ClassLength:
- Enabled: false
-Documentation:
- Enabled: false
-Encoding:
- Enabled: false
-LineLength:
- Enabled: false
-MethodLength:
- Enabled: false
-NumericLiterals:
- Enabled: false
-Style/StringLiterals:
- Enabled: true
- EnforcedStyle: single_quotes
-Metrics/LineLength:
- Enabled: false
-Style/ExtraSpacing:
- Enabled: false
-Style/SpaceBeforeFirstArg:
- Enabled: false
-Style/FrozenStringLiteralComment:
- Enabled: false
-Lint/AmbiguousBlockAssociation:
- Enabled: false
-Style/PercentLiteralDelimiters:
- Enabled: false
-Style/SymbolArray:
- Enabled: false
-Style/TrailingCommaInArrayLiteral:
- EnforcedStyleForMultiline: no_comma
diff --git a/lc-gdn-chef/cookbooks/prometheus/.travis.yml b/lc-gdn-chef/cookbooks/prometheus/.travis.yml
deleted file mode 100644
index 4fcc2becc6a640cf92851751d462e6048e63454e..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/.travis.yml
+++ /dev/null
@@ -1,24 +0,0 @@
-dist: trusty
-sudo: required
-addons:
- apt:
- sources:
- - chef-current-trusty
- packages:
- - chefdk
-
-# Don't `bundle install`
-install: echo "skip bundle install"
-
-services: docker
-
-# Ensure we make ChefDK's Ruby the default
-before_script:
- - sudo iptables -L DOCKER || ( echo "DOCKER iptables chain missing" ; sudo iptables -N DOCKER )
- - eval "$(chef shell-init bash)"
- - chef --version
-
-script:
- - cookstyle
- - foodcritic .
- - KITCHEN_LOCAL_YAML=.kitchen.dokken.yml chef exec kitchen verify
diff --git a/lc-gdn-chef/cookbooks/prometheus/Berksfile b/lc-gdn-chef/cookbooks/prometheus/Berksfile
deleted file mode 100644
index 34fea2166bafb6b6a62488907c532a9d4692d35e..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/Berksfile
+++ /dev/null
@@ -1,3 +0,0 @@
-source 'https://supermarket.chef.io'
-
-metadata
diff --git a/lc-gdn-chef/cookbooks/prometheus/CHANGELOG.md b/lc-gdn-chef/cookbooks/prometheus/CHANGELOG.md
deleted file mode 100644
index d113da7721171e9719204aa80a3a8492fa24cab8..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/CHANGELOG.md
+++ /dev/null
@@ -1,75 +0,0 @@
-# Change Log
-
-All notable changes to this project will be documented in this file.
-
-## [Unreleased][unreleased]
-
-### Changed
-
-- Updated attributes and templates for Prometheus 0.15 release.
-
-### Added
-
-- Added upstart init for ubuntu platform.
-
-## [0.5.1] - 2015-03-25
-
-### Changed
-
-- Updated documentation.
-
-## [0.5.0] - 2015-03-25
-
-### Added
-
-- Added systemd init for redhat platform family version 7 or greater.
-- Default init style per platform.
-- Install Prometheus via pre-compiled binary.
-- Added the prometheus_job resource for defining Prometheus scraping jobs.
-- Attribute flag to externally manage prometheus.conf file.
-
-### Changed
-
-- Removed flags that were deprecated in the prometheus 0.12.0 release.
-
-### Contributors for this release:
-
-- [Eric Richardson](https://github.com/ewr) - External jobs config and prometheus job resource.
-
-Thank You!
-
-## [0.4.0] - 2015-03-12
-
-### Fixed
-
-- Fix init template path bug on chef 11.x.
-
-## [0.3.0] - 2015-03-11
-
-### Fixed
-
-- Fixed cookbook badge in README
-
-## [0.2.0] - 2015-03-11
-
-### Fixed
-
-- License defined in metadata.
-
-## 0.1.0 - 2015-03-11
-
-### Changed
-
-- Initial release of prometheus cookbook
-
-[unreleased]: https://github.com/rayrod2030/chef-prometheus/compare/0.5.1...HEAD
-
-[0.5.1]: https://github.com/rayrod2030/chef-prometheus/compare/0.5.0...0.5.1
-
-[0.5.0]: https://github.com/rayrod2030/chef-prometheus/compare/0.4.0...0.5.0
-
-[0.4.0]: https://github.com/rayrod2030/chef-prometheus/compare/0.3.0...0.4.0
-
-[0.3.0]: https://github.com/rayrod2030/chef-prometheus/compare/0.2.0...0.3.0
-
-[0.2.0]: https://github.com/rayrod2030/chef-prometheus/compare/0.1.0...0.2.0
diff --git a/lc-gdn-chef/cookbooks/prometheus/CONTRIBUTING.md b/lc-gdn-chef/cookbooks/prometheus/CONTRIBUTING.md
deleted file mode 100644
index c12baec6366c4720eb683a65565d2386d0e3d4bd..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/CONTRIBUTING.md
+++ /dev/null
@@ -1,51 +0,0 @@
-Contributing to the prometheus Cookbook
-=========================================
-The prometheus cookbook uses [Github][] to triage, manage, and track issues and changes to the cookbook.
-
-Everybody is welcome to submit patches, but we ask you keep the following guidelines in mind:
-
-- [Coding Standards](#coding-standards)
-- [Testing](#testing)
-
-Coding Standards
-----------------
-The submitted code should be compatible with the standard Ruby coding guidelines. Here are some additional resources:
-
-- [Ruby Style Guide](https://github.com/bbatsov/ruby-style-guide)
-- [GitHub Styleguide](https://github.com/styleguide/ruby)
-
-This cookbook is equipped with Rubocop, which will fail the build for violating these standards.
-
-Testing
--------
-Whether your pull request is a bug fix or introduces new classes or methods to the project, we kindly ask that you
-include tests for your changes. Even if it's just a small improvement, a test is necessary to ensure the bug is never
-re-introduced.
-
-We understand that not all users are familiar with the testing ecosystem. This cookbook is fully-tested
-using [Foodcritic](https://github.com/acrmp/foodcritic),
-[Rubocop](https://github.com/bbatsov/rubocop), and [Test Kitchen](https://github.com/test-kitchen/test-kitchen) with
-[Serverspec](https://github.com/serverspec/serverspec) bussers.
-
-Process
--------
-
-1. Clone the git repository from GitHub:
-
- $ git clone git@github.com:rayrod2030/chef-prometheus.git
-
-2. Make sure you have a sane [ChefDK][] development environment:
-
- $ chef version
-
-3. Make any changes
-4. Write tests to support those changes.
-5. Run the tests:
-
- $ kitchen verify
-
-6. Assuming the tests pass, commit your changes
-
-[ChefDK]: https://downloads.chef.io/chef-dk/
-
-[github]: https://github.com/rayrod2030/chef-prometheus/issues
diff --git a/lc-gdn-chef/cookbooks/prometheus/Gemfile b/lc-gdn-chef/cookbooks/prometheus/Gemfile
deleted file mode 100644
index b59a528583e8634b00317b35d3c8e145d6770172..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/Gemfile
+++ /dev/null
@@ -1,36 +0,0 @@
-source 'https://rubygems.org'
-
-group :unit do
- gem 'berkshelf', '~> 6.3.1'
- gem 'chefspec', '~> 7.1.1'
-end
-
-group :lint do
- gem 'foodcritic', '~> 12.2.1'
- gem 'rubocop', '~> 0.49.1'
- gem 'cookstyle', '~> 2.1.0'
-end
-
-group :kitchen_common do
- gem 'test-kitchen', '~> 1.19.2'
-end
-
-group :kitchen_docker do
- gem 'kitchen-docker', '~> 2.6.0'
-end
-
-group :kitchen_vagrant do
- gem 'kitchen-vagrant', '~> 1.2.1'
-end
-
-group :kitchen_cloud do
- gem 'kitchen-ec2'
-end
-
-group :integration do
- gem 'serverspec'
-end
-
-group :development do
- gem 'rake'
-end
diff --git a/lc-gdn-chef/cookbooks/prometheus/ISSUES.md b/lc-gdn-chef/cookbooks/prometheus/ISSUES.md
deleted file mode 100644
index 36f81fc2ed302121f8c30ae547f1176f35eeb5f9..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/ISSUES.md
+++ /dev/null
@@ -1,42 +0,0 @@
-prometheus Cookbook Issues
-============================
-This file documents the steps necessary to report any issue with the prometheus cookbook. Following these guidelines
-will help ensure your issue is resolved in a timely manner.
-
-Reporting
----------
-When you report an issue, please include the following information:
-
-- A high-level overview of what you are trying to accomplish
-- An [SSCCE](http://sscce.org/)
-- The command you ran
-- What you expected to happen
-- What actually happened
-- The exception backtrace(s), if any
-- What operating system and version
-- Everything output by running `env`
-- What version of the cookbook are you using?
-- What version of Ruby you are using (run `ruby -v`)
-- What version of Rubygems you are using (run `gem -v`)
-- What version of Chef you are using (run `knife -v`)
-
-Here's a snippet you can copy-paste into the issue and fill out:
-
-```text
-(What is the issue? What are you trying to do? What happened?)
-
-- Command: `...`
-- OS:
-- Cookbook Version:
-- Ruby Version:
-- Rubygems Version:
-- Chef Version:
-- env:
- ```text
- # Paste your env here
- ```
-- Backtrace:
- ```text
- # Paste backtrace here
- ```
-```
diff --git a/lc-gdn-chef/cookbooks/prometheus/LICENSE b/lc-gdn-chef/cookbooks/prometheus/LICENSE
deleted file mode 100644
index 11069edd79019f7dafbe3138841cf289209270dd..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/LICENSE
+++ /dev/null
@@ -1,201 +0,0 @@
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
-TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
-2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
-3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
-4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
-5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
-6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
-7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
-8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
-9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
-END OF TERMS AND CONDITIONS
-
-APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
-Copyright [yyyy] [name of copyright owner]
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
diff --git a/lc-gdn-chef/cookbooks/prometheus/README.md b/lc-gdn-chef/cookbooks/prometheus/README.md
deleted file mode 100644
index 63946999d89251453b36ea01bdf02f4987a7cb8f..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/README.md
+++ /dev/null
@@ -1,176 +0,0 @@
-prometheus Cookbook
-=====================
-[Cookbook](https://github.com/rayrod2030/chef-prometheus)
-[Build Status](https://travis-ci.org/rayrod2030/chef-prometheus?branch=master)
-[Gitter](https://gitter.im/rayrod2030/chef-prometheus)
-
-This cookbook installs the [Prometheus][] monitoring system and time-series database.
-
-Requirements
-------------
-
-- Chef 12 or higher
-- Ruby 2.2 or higher
-
-Platform
---------
-Tested on
-
-* Ubuntu 14.04
-* Ubuntu 12.04
-* Debian 7.7
-* Centos 6.6
-* Centos 7.0
-
-Attributes
-----------
-In order to keep the README manageable and in sync with the attributes, this cookbook documents attributes inline. The
-usage instructions and default values for attributes can be found in the individual attribute files.
-
-Recipes
--------
-
-### default
-
-The `default` recipe creates all the default [Prometheus][] directories, config
-files, and users. It then calls the configured `install_method` recipe and
-finally the prometheus `service` recipe.
-
-### source
-
-The `source` recipe builds Prometheus from a Github source tag.
-
-### binary
-
-The `binary` recipe retrieves and installs a pre-compiled Prometheus build from a user-defined location.
-
-### service
-
-The `service` recipe configures Prometheus to run under a process supervisor. Default supervisors are chosen based on
-distribution. Currently supported supervisors are init, runit, systemd, upstart and bluepill.
-
-Resource/Provider
------------------
-
-### prometheus_job
-
-This resource adds a job definition to the Prometheus config file. Here is an example of using this resource to define
-the default Prometheus job:
-
-```ruby
-prometheus_job 'prometheus' do
- scrape_interval '15s'
- target "http://localhost#{node['prometheus']['flags']['web.listen-address']}#{node['prometheus']['flags']['web.telemetry-path']}"
-end
-```
-
-Note: This cookbook uses the accumulator pattern, so you can define multiple `prometheus_job` resources and they will
-all be added to the Prometheus config.
-
-Externally managing `prometheus.conf`
--------------------------------------
-
-If you prefer to manage your `prometheus.conf` file externally using your own inventory or service discovery mechanism
-you can set
-`default['prometheus']['allow_external_config']` to `true`.
-
-Dependencies
-------------
-
-The following cookbooks are dependencies:
-
-* [build-essential][]
-* [apt][]
-* [yum][]
-* [runit][]
-* [bluepill][]
-* [accumulator][]
-* [ark][]
-
-## Usage
-
-### prometheus::default
-
-Include `prometheus` in your node's `run_list` to execute the standard deployment of prometheus:
-
-```json
-{
- "run_list": [
- "recipe[prometheus::default]"
- ]
-}
-```
-
-### prometheus::use_lwrp
-
-Used to load the prometheus cookbook from a wrapper cookbook.
-
-`prometheus::use_lwrp` doesn't do anything other than allow you to include the Prometheus cookbook into your wrapper or
-app cookbooks. Doing this allows you to override prometheus attributes and use the prometheus LWRP (`prometheus_job`) in
-your wrapper cookbooks.
-
-```ruby
-# Load the prometheus cookbook into your wrapper so you have access to the LWRP and attributes
-
-include_recipe "prometheus::use_lwrp"
-
-# Add a rule filename under `rule_files` in prometheus.yml.erb
-node.set['prometheus']['rule_filenames'] = ["#{node['prometheus']['dir']}/alert.rules"]
-
-# Example of using search to populate prometheus.yml jobs using the prometheus_job LWRP.
-# Finds all instances in the current environment that are tagged with "node_exporter".
-# Assumes that the service instances were tagged in their own recipes.
-client_servers = search(:node, "environment:#{node.chef_environment} AND tags:node_exporter")
-
-# Assumes service_name is an attribute of each node
-client_servers.each do |server|
-  prometheus_job server.service_name do
-    scrape_interval '15s'
-    target "#{server.fqdn}#{node['prometheus']['flags']['web.listen-address']}"
-    metrics_path "#{node['prometheus']['flags']['web.telemetry-path']}"
-  end
-end
-
-# Now run the default recipe that does all the work configuring and deploying prometheus
-include_recipe "prometheus::default"
-```
-
-Development
------------
-Please see the [Contributing](CONTRIBUTING.md) and [Issue Reporting](ISSUES.md) Guidelines.
-
-License & Authors
-------
-
-- Author: Ray Rodriguez
-- Author: kristian järvenpää
-
-```text
-Licensed under the Apache License, Version 2.0 (the “License”);
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an “AS IS” BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-```
-
-[build-essential]: https://github.com/opscode-cookbooks/build-essential
-
-[apt]: https://github.com/opscode-cookbooks/apt
-
-[runit]: https://github.com/hw-cookbooks/runit
-
-[Prometheus]: https://github.com/prometheus/prometheus
-
-[bluepill]: https://github.com/opscode-cookbooks/bluepill
-
-[ark]: https://github.com/burtlo/ark
-
-[yum]: https://github.com/chef-cookbooks/yum
-
-[accumulator]: https://github.com/kisoku/chef-accumulator
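
The deleted README describes the accumulator pattern for `prometheus_job` only in prose plus a search-driven loop. A minimal static sketch of the same idea in a wrapper recipe, assuming the `target` and `metrics_path` properties behave as in the README's examples (the job names, ports, and intervals are made up), could look like this:

```ruby
# Minimal sketch of the accumulator pattern: several prometheus_job resources
# defined in a wrapper recipe all end up in the rendered prometheus.yml.
include_recipe 'prometheus::use_lwrp'

prometheus_job 'node_exporter' do
  scrape_interval '30s'
  target 'localhost:9100'
  metrics_path '/metrics'
end

prometheus_job 'blackbox' do
  scrape_interval '60s'
  target 'localhost:9115'
  metrics_path '/probe'
end

# The default recipe renders prometheus.yml with both jobs accumulated.
include_recipe 'prometheus::default'
```
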
diff --git a/lc-gdn-chef/cookbooks/prometheus/Rakefile b/lc-gdn-chef/cookbooks/prometheus/Rakefile
deleted file mode 100644
index c89ac58b334182eabbdf5d529194f080dd89ec4c..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/Rakefile
+++ /dev/null
@@ -1,45 +0,0 @@
-require 'bundler/setup'
-require 'cookstyle'
-require 'rubocop/rake_task'
-require 'foodcritic'
-require 'kitchen'
-require 'rspec/core/rake_task'
-
-# Unit Tests. rspec/chefspec
-RSpec::Core::RakeTask.new(:unit)
-
-# Style tests. Rubocop and Foodcritic
-namespace :style do
- desc 'Run Ruby style checks'
- RuboCop::RakeTask.new(:ruby)
-
- desc 'Run Chef style checks'
- FoodCritic::Rake::LintTask.new(:chef) do |t|
- t.options = {
- fail_tags: ['any']
- }
- end
-end
-
-desc 'Run all style checks'
-task style: ['style:chef', 'style:ruby']
-
-# Integration tests. Kitchen.ci
-namespace :integration do
- desc 'Run Test Kitchen with Vagrant'
- task :vagrant do
- Kitchen.logger = Kitchen.default_file_logger
- Kitchen::Config.new.instances.each do |instance|
- instance.test(:always)
- end
- end
-end
-
-# We cannot run Test Kitchen on Travis CI yet...
-namespace :travis do
- desc 'Run tests on Travis'
- task ci: %w(style unit)
-end
-
-# The default rake task should just run it all
-task default: ['travis:ci', 'integration']
diff --git a/lc-gdn-chef/cookbooks/prometheus/Thorfile b/lc-gdn-chef/cookbooks/prometheus/Thorfile
deleted file mode 100644
index 19aaaf2fdb2e82f9bd3a5b4d41911f677084e221..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/Thorfile
+++ /dev/null
@@ -1,12 +0,0 @@
-# encoding: utf-8
-
-require 'bundler'
-require 'bundler/setup'
-require 'berkshelf/thor'
-
-begin
- require 'kitchen/thor_tasks'
- Kitchen::ThorTasks.new
-rescue LoadError
- puts '>>>>> Kitchen gem not loaded, omitting tasks' unless ENV['CI']
-end
diff --git a/lc-gdn-chef/cookbooks/prometheus/Vagrantfile b/lc-gdn-chef/cookbooks/prometheus/Vagrantfile
deleted file mode 100644
index d2c6b39da8b802623c55540e8051fa21747fb625..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/Vagrantfile
+++ /dev/null
@@ -1,90 +0,0 @@
-# -*- mode: ruby -*-
-# vi: set ft=ruby :
-
-# Vagrantfile API/syntax version. Don't touch unless you know what you're doing!
-VAGRANTFILE_API_VERSION = '2'.freeze
-
-Vagrant.require_version '>= 1.5.0'
-
-Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
- # All Vagrant configuration is done here. The most common configuration
- # options are documented and commented below. For a complete reference,
- # please see the online documentation at vagrantup.com.
-
- config.vm.hostname = 'prometheus-berkshelf'
-
- # Set the version of chef to install using the vagrant-omnibus plugin
- # NOTE: You will need to install the vagrant-omnibus plugin:
- #
- # $ vagrant plugin install vagrant-omnibus
- #
- if Vagrant.has_plugin?('vagrant-omnibus')
- config.omnibus.chef_version = 'latest'
- end
-
- # Every Vagrant virtual environment requires a box to build off of.
- # If this value is a shorthand to a box in Vagrant Cloud then
- # config.vm.box_url doesn't need to be specified.
- config.vm.box = 'bento/ubuntu-14.04'
-
- # Assign this VM to a host-only network IP, allowing you to access it
- # via the IP. Host-only networks can talk to the host machine as well as
- # any other machines on the same network, but cannot be accessed (through this
- # network interface) by any external networks.
- config.vm.network :private_network, type: 'dhcp'
-
- # Create a forwarded port mapping which allows access to a specific port
- # within the machine from a port on the host machine. In the example below,
- # accessing "localhost:8080" will access port 80 on the guest machine.
-
- # Share an additional folder to the guest VM. The first argument is
- # the path on the host to the actual folder. The second argument is
- # the path on the guest to mount the folder. And the optional third
- # argument is a set of non-required options.
- # config.vm.synced_folder "../data", "/vagrant_data"
-
- # Provider-specific configuration so you can fine-tune various
- # backing providers for Vagrant. These expose provider-specific options.
- # Example for VirtualBox:
- #
- # config.vm.provider :virtualbox do |vb|
- # # Don't boot with headless mode
- # vb.gui = true
- #
- # # Use VBoxManage to customize the VM. For example to change memory:
- # vb.customize ["modifyvm", :id, "--memory", "1024"]
- # end
- #
- # View the documentation for the provider you're using for more
- # information on available options.
-
- # The path to the Berksfile to use with Vagrant Berkshelf
- # config.berkshelf.berksfile_path = "./Berksfile"
-
- # Enabling the Berkshelf plugin. To enable this globally, add this configuration
- # option to your ~/.vagrant.d/Vagrantfile file
- config.berkshelf.enabled = true
-
- # An array of symbols representing groups of cookbook described in the Vagrantfile
- # to exclusively install and copy to Vagrant's shelf.
- # config.berkshelf.only = []
-
- # An array of symbols representing groups of cookbook described in the Vagrantfile
- # to skip installing and copying to Vagrant's shelf.
- # config.berkshelf.except = []
-
- config.vm.provision :chef_solo do |chef|
- chef.json = {
- mysql: {
- server_root_password: 'rootpass',
- server_debian_password: 'debpass',
- server_repl_password: 'replpass'
- }
- }
-
- chef.run_list = [
- 'recipe[prometheus::default]',
- 'recipe[prometheus::alertmanager]'
- ]
- end
-end
diff --git a/lc-gdn-chef/cookbooks/prometheus/attributes/default.rb b/lc-gdn-chef/cookbooks/prometheus/attributes/default.rb
deleted file mode 100644
index 508620febfa5467e1b75aeddc3349e0c60332414..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/attributes/default.rb
+++ /dev/null
@@ -1,274 +0,0 @@
-#
-# Cookbook Name:: prometheus
-# Attributes:: default
-#
-
-# Directory where the prometheus binary will be installed
-default['prometheus']['dir'] = '/opt/prometheus'
-
-# Location of Prometheus binary
-default['prometheus']['binary'] = "#{node['prometheus']['dir']}/prometheus"
-
-# Location of Prometheus pid file
-default['prometheus']['pid'] = '/var/run/prometheus.pid'
-
-# Install method. Currently supports source, binary and shell_binary.
-default['prometheus']['install_method'] = 'binary'
-
-# Init style.
-# rubocop:disable Style/ConditionalAssignment
-case node['platform_family']
-when 'debian'
- if node['platform'] == 'ubuntu' && node['platform_version'].to_f < 15.04
- default['prometheus']['init_style'] = 'upstart'
- elsif node['platform'] == 'debian' && node['platform_version'].to_f < 8.0
- default['prometheus']['init_style'] = 'runit'
- else
- default['prometheus']['init_style'] = 'systemd'
- end
-when 'rhel', 'fedora'
- if node['platform_version'].to_i >= 7
- default['prometheus']['init_style'] = 'systemd'
- else
- default['prometheus']['init_style'] = 'init'
- end
-else
- default['prometheus']['init_style'] = 'init'
-end
-# rubocop:enable Style/ConditionalAssignment
-
-# Location for Prometheus logs
-default['prometheus']['log_dir'] = '/var/log/prometheus'
-
-# Prometheus version to build
-default['prometheus']['version'] = '2.2.1'
-
-# Prometheus source repository.
-default['prometheus']['source']['git_repository'] = 'https://github.com/prometheus/prometheus.git'
-
-# Prometheus source repository git reference. Defaults to version tag. Can
-# also be set to a branch or master.
-default['prometheus']['source']['git_revision'] = "v#{node['prometheus']['version']}"
-
-# System user to use
-default['prometheus']['user'] = 'prometheus'
-
-# System group to use
-default['prometheus']['group'] = 'prometheus'
-
-# Set if you want to use the root user
-default['prometheus']['use_existing_user'] = false
-
-# Location for Prometheus pre-compiled binary.
-# Default for testing purposes
-default['prometheus']['binary_url'] = "https://github.com/prometheus/prometheus/releases/download/v#{node['prometheus']['version']}/prometheus-#{node['prometheus']['version']}.linux-amd64.tar.gz"
-
-# Checksum for pre-compiled binary
-# Default for testing purposes
-default['prometheus']['checksum'] = 'caddbbbe3ef8545c6cefb32f9a11207ae18dcc788e8d0fb19659d88c58d14b37'
-
-# If file extension of your binary can not be determined by the URL
-# then define it here. Example 'tar.bz2'
-default['prometheus']['file_extension'] = ''
-
-# Should we allow external config changes?
-default['prometheus']['allow_external_config'] = false
-
-# Prometheus job configuration chef template name.
-default['prometheus']['job_config_template_name'] = 'prometheus.yml.erb'
-
-# Prometheus custom configuration cookbook. Use this if you'd like to bypass the
-# default prometheus cookbook job configuration template and implement your own
-# templates and recipes to configure Prometheus jobs.
-default['prometheus']['job_config_cookbook_name'] = 'prometheus'
-
-# FLAGS Section: Any attributes defined under the flags hash will be used to
-# generate the command line flags for the Prometheus executable.
-
-# Prometheus configuration file name.
-
-default['prometheus']['v2_cli_flags'] = [
- 'web.enable-lifecycle'
-]
-
-default['prometheus']['flags']['config.file'] = "#{node['prometheus']['dir']}/prometheus.yml"
-default['prometheus']['v2_cli_opts']['config.file'] = "#{node['prometheus']['dir']}/prometheus.yml"
-
-# Only log messages with the given severity or above. Valid levels: [debug, info, warn, error, fatal, panic].
-default['prometheus']['flags']['log.level'] = 'info'
-default['prometheus']['v2_cli_opts']['log.level'] = 'info'
-
-# Alert manager HTTP API timeout.
-timeout_flag = Gem::Version.new(node['prometheus']['version']) <= Gem::Version.new('0.16.2') ? 'http-deadline' : 'timeout'
-default['prometheus']['flags']["alertmanager.#{timeout_flag}"] = '10s'
-default['prometheus']['v2_cli_opts']["alertmanager.#{timeout_flag}"] = '10s'
-
-# The capacity of the queue for pending alert manager notifications.
-default['prometheus']['flags']['alertmanager.notification-queue-capacity'] = 100
-default['prometheus']['v2_cli_opts']['alertmanager.notification-queue-capacity'] = 100
-
-# The URL of the alert manager to send notifications to.
-default['prometheus']['flags']['alertmanager.url'] = 'http://127.0.0.1/alert-manager/'
-
-# Maximum number of queries executed concurrently.
-default['prometheus']['flags']['query.max-concurrency'] = 20
-default['prometheus']['v2_cli_opts']['query.max-concurrency'] = 20
-
-# Staleness delta allowance during expression evaluations.
-default['prometheus']['flags']['query.staleness-delta'] = '5m'
-default['prometheus']['v2_cli_opts']['query.lookback-delta'] = '5m'
-
-# Maximum time a query may take before being aborted.
-default['prometheus']['flags']['query.timeout'] = '2m'
-default['prometheus']['v2_cli_opts']['query.timeout'] = '2m'
-
-# If approx. that many time series are in a state that would require a recovery
-# operation after a crash, a checkpoint is triggered, even if the checkpoint interval
-# hasn't passed yet. A recovery operation requires a disk seek. The default limit
-# intends to keep the recovery time below 1min even on spinning disks. With SSD,
-# recovery is much faster, so you might want to increase this value in that case
-# to avoid overly frequent checkpoints.
-default['prometheus']['flags']['storage.local.checkpoint-dirty-series-limit'] = 5000
-
-# The period at which the in-memory index of time series is checkpointed.
-default['prometheus']['flags']['storage.local.checkpoint-interval'] = '5m'
-
-# If set, the local storage layer will perform crash recovery even if the last
-# shutdown appears to be clean.
-default['prometheus']['flags']['storage.local.dirty'] = false
-
-# The size in bytes for the fingerprint to metric index cache.
-default['prometheus']['flags']['storage.local.index-cache-size.fingerprint-to-metric'] = 10485760
-
-# The size in bytes for the metric time range index cache.
-default['prometheus']['flags']['storage.local.index-cache-size.fingerprint-to-timerange'] = 5242880
-
-# The size in bytes for the label name to label values index cache.
-default['prometheus']['flags']['storage.local.index-cache-size.label-name-to-label-values'] = 10485760
-
-# The size in bytes for the label pair to fingerprints index cache.
-default['prometheus']['flags']['storage.local.index-cache-size.label-pair-to-fingerprints'] = 20971520
-
-# How many chunks to keep in memory. While the size of a chunk is 1kiB, the total
-# memory usage will be significantly higher than this value * 1kiB. Furthermore,
-# for various reasons, more chunks might have to be kept in memory temporarily.
-default['prometheus']['flags']['storage.local.memory-chunks'] = 1048576
-
-# Base path for metrics storage.
-default['prometheus']['flags']['storage.local.path'] = '/var/lib/prometheus'
-default['prometheus']['v2_cli_opts']['storage.tsdb.path'] = '/var/lib/prometheus'
-
-# If set, a crash recovery will perform checks on each series file. This might take a very long time.
-default['prometheus']['flags']['storage.local.pedantic-checks'] = false
-
-# How long to retain samples in the local storage.
-default['prometheus']['flags']['storage.local.retention'] = '360h0m0s'
-default['prometheus']['v2_cli_opts']['storage.tsdb.retention'] = '15d'
-
-# When to sync series files after modification. Possible values:
-# 'never', 'always', 'adaptive'. Sync'ing slows down storage performance
-# but reduces the risk of data loss in case of an OS crash. With the
-# 'adaptive' strategy, series files are sync'd for as long as the storage
-# is not too much behind on chunk persistence.
-default['prometheus']['flags']['storage.local.series-sync-strategy'] = 'adaptive'
-
-# The URL of the remote InfluxDB server to send samples to. None, if empty.
-default['prometheus']['flags']['storage.remote.influxdb-url'] = ''
-
-# The name of the database to use for storing samples in InfluxDB.
-default['prometheus']['flags']['storage.remote.influxdb.database'] = 'prometheus'
-
-# The InfluxDB retention policy to use.
-default['prometheus']['flags']['storage.remote.influxdb.retention-policy'] = 'default'
-
-# The URL of the OpenTSDB instance to send samples to. None, if empty.
-default['prometheus']['flags']['storage.remote.opentsdb-url'] = ''
-
-# The timeout to use when sending samples to the remote storage.
-default['prometheus']['flags']['storage.remote.timeout'] = '30s'
-
-# prometheus v2.x flags
-default['prometheus']['flags']['storage.remote.timeout'] = '30s'
-
-# Path to the console library directory.
-default['prometheus']['flags']['web.console.libraries'] = 'console_libraries'
-
-# Path to the console template directory, available at /console.
-default['prometheus']['flags']['web.console.templates'] = 'consoles'
-
-# Enable remote service shutdown.
-default['prometheus']['flags']['web.enable-remote-shutdown'] = false
-
-# The URL under which Prometheus is externally reachable (for
-# example, if Prometheus is served via a reverse proxy). Used for
-# generating relative and absolute links back to Prometheus itself. If
-# omitted, relevant URL components will be derived automatically.
-default['prometheus']['flags']['web.external-url'] = ''
-
-# Address to listen on for the web interface, API, and telemetry.
-default['prometheus']['flags']['web.listen-address'] = ':9090'
-
-# Path under which to expose metrics.
-default['prometheus']['flags']['web.telemetry-path'] = '/metrics'
-
-# Read assets/templates from file instead of binary.
-# web.use-local-assets flag got removed in 0.17
-# https://github.com/prometheus/prometheus/commit/a542cc86096e1bad694e04d307301a807583dfc6
-if Gem::Version.new(node['prometheus']['version']) <= Gem::Version.new('0.16.2')
- default['prometheus']['flags']['web.use-local-assets'] = false
-end
-
-# Path to static asset directory, available at /user.
-default['prometheus']['flags']['web.user-assets'] = ''
-
-# Alertmanager attributes
-
-# Install method. Currently supports source and binary.
-default['prometheus']['alertmanager']['install_method'] = 'binary'
-
-# Location of Alertmanager binary
-default['prometheus']['alertmanager']['binary'] = "#{node['prometheus']['dir']}/alertmanager"
-
-# Alertmanager version to build
-default['prometheus']['alertmanager']['version'] = '0.14.0'
-
-# Alertmanager source repository.
-default['prometheus']['alertmanager']['git_repository'] = 'https://github.com/prometheus/alertmanager.git'
-
-# Alertmanager source repository git reference. Defaults to version tag. Can
-# also be set to a branch or master.
-default['prometheus']['alertmanager']['git_revision'] = "v#{node['prometheus']['alertmanager']['version']}"
-
-# Location for Alertmanager pre-compiled binary.
-# Default for testing purposes
-default['prometheus']['alertmanager']['binary_url'] = "https://github.com/prometheus/alertmanager/releases/download/v#{node['prometheus']['alertmanager']['version']}/alertmanager-#{node['prometheus']['alertmanager']['version']}.linux-amd64.tar.gz"
-
-# Checksum for pre-compiled binary
-# Default for testing purposes
-default['prometheus']['alertmanager']['checksum'] = 'caddbbbe3ef8545c6cefb32f9a11207ae18dcc788e8d0fb19659d88c58d14b37'
-
-# If file extension of your binary can not be determined by the URL
-# then define it here. Example 'tar.bz2'
-default['prometheus']['alertmanager']['file_extension'] = ''
-
-# Alertmanager configuration file name.
-default['prometheus']['alertmanager']['config.file'] = "#{node['prometheus']['dir']}/alertmanager.yml"
-
-# Alertmanager configuration storage directory
-default['prometheus']['alertmanager']['storage.path'] = "#{node['prometheus']['dir']}/data"
-
-# Alertmanager custom configuration cookbook. Use this if you'd like to bypass the
-# default prometheus cookbook Alertmanager configuration template and implement your own
-# templates and recipes to configure Alertmanager.
-default['prometheus']['alertmanager']['config_cookbook_name'] = 'prometheus'
-
-# Alertmanager configuration chef template name.
-default['prometheus']['alertmanager']['config_template_name'] = 'alertmanager.yml.erb'
-
-# Array of alert rules filenames to be inserted in prometheus.yml.erb under "rule_files"
-default['prometheus']['rule_filenames'] = nil
-
-default['prometheus']['alertmanager']['notification'] = {}
-
-default['prometheus']['global']['scrape_interval'] = '60s'
-default['prometheus']['global']['evaluation_interval'] = '60s'
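
The deleted attributes file drives the Prometheus command line from the `flags`, `v2_cli_opts`, and `v2_cli_flags` attributes. A wrapper cookbook would normally override these rather than edit the file; the following sketch shows plausible overrides (keys mirror the attributes above, values are examples only):

```ruby
# Wrapper-cookbook attributes file: example overrides of the flag hashes.
default['prometheus']['version'] = '2.2.1'

# v2_cli_opts entries become "--key=value" pairs on the command line.
default['prometheus']['v2_cli_opts']['storage.tsdb.retention'] = '30d'
default['prometheus']['v2_cli_opts']['web.listen-address']     = ':9090'

# v2_cli_flags entries become bare "--flag" switches.
default['prometheus']['v2_cli_flags'] = %w(web.enable-lifecycle web.enable-admin-api)
```
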
diff --git a/lc-gdn-chef/cookbooks/prometheus/chefignore b/lc-gdn-chef/cookbooks/prometheus/chefignore
deleted file mode 100644
index 138a808b67633b24ad88bf2d33fafca5f30e8e16..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/chefignore
+++ /dev/null
@@ -1,94 +0,0 @@
-# Put files/directories that should be ignored in this file when uploading
-# or sharing to the community site.
-# Lines that start with '# ' are comments.
-
-# OS generated files #
-######################
-.DS_Store
-Icon?
-nohup.out
-ehthumbs.db
-Thumbs.db
-
-# SASS #
-########
-.sass-cache
-
-# EDITORS #
-###########
-\#*
-.#*
-*~
-*.sw[a-z]
-*.bak
-REVISION
-TAGS*
-tmtags
-*_flymake.*
-*_flymake
-*.tmproj
-.project
-.settings
-mkmf.log
-
-## COMPILED ##
-##############
-a.out
-*.o
-*.pyc
-*.so
-*.com
-*.class
-*.dll
-*.exe
-*/rdoc/
-
-# Testing #
-###########
-.watchr
-.rspec
-spec/*
-spec/fixtures/*
-test/*
-features/*
-Guardfile
-Procfile
-
-# SCM #
-#######
-.git
-*/.git
-.gitignore
-.gitmodules
-.gitconfig
-.gitattributes
-.svn
-*/.bzr/*
-*/.hg/*
-*/.svn/*
-
-# Berkshelf #
-#############
-cookbooks/*
-tmp
-
-# Cookbooks #
-#############
-CONTRIBUTING
-CHANGELOG*
-
-# Strainer #
-############
-Colanderfile
-Strainerfile
-.colander
-.strainer
-
-# Vagrant #
-###########
-.vagrant
-Vagrantfile
-
-# Travis #
-##########
-.travis.yml
diff --git a/lc-gdn-chef/cookbooks/prometheus/libraries/prometheus_helper.rb b/lc-gdn-chef/cookbooks/prometheus/libraries/prometheus_helper.rb
deleted file mode 100644
index bffb1059cd10e4e7826d29f36cc6c2155ae41ed0..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/libraries/prometheus_helper.rb
+++ /dev/null
@@ -1,19 +0,0 @@
-
-def generate_flags
- config = ''
- if Gem::Version.new(node['prometheus']['version']) < Gem::Version.new('2.0.0-alpha.0')
- # Generate cli opts for prometheus 1.x
- node['prometheus']['flags'].each do |flag_key, flag_value|
- config += "-#{flag_key}=#{flag_value} " if flag_value != ''
- end
- else
- # Generate cli opts for prometheus 2.x & hopefully beyond if there are no changes
- node['prometheus']['v2_cli_opts'].each do |opt_key, opt_value|
- config += "--#{opt_key}=#{opt_value} " if opt_value != ''
- end
- node['prometheus']['v2_cli_flags'].each do |opt_flag|
- config += "--#{opt_flag} " if opt_flag != ''
- end
- end
- config
-end
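
The deleted `generate_flags` helper concatenates those attributes into the daemon's argument string. This standalone sketch reproduces its 2.x branch with plain hashes in place of node attributes (option names mirror the defaults in `attributes/default.rb`; the values are illustrative):

```ruby
# Plain hashes standing in for node['prometheus']['v2_cli_opts'] and ['v2_cli_flags'].
v2_cli_opts = {
  'config.file'            => '/opt/prometheus/prometheus.yml',
  'log.level'              => 'info',
  'storage.tsdb.path'      => '/var/lib/prometheus',
  'storage.tsdb.retention' => '15d'
}
v2_cli_flags = ['web.enable-lifecycle']

config = ''
v2_cli_opts.each  { |key, value| config += "--#{key}=#{value} " if value != '' }
v2_cli_flags.each { |flag|       config += "--#{flag} "         if flag != '' }

puts config
# --config.file=/opt/prometheus/prometheus.yml --log.level=info \
# --storage.tsdb.path=/var/lib/prometheus --storage.tsdb.retention=15d --web.enable-lifecycle
```
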
diff --git a/lc-gdn-chef/cookbooks/prometheus/metadata.rb b/lc-gdn-chef/cookbooks/prometheus/metadata.rb
deleted file mode 100644
index 9c4a7d18a6528952fdf50403dc01ed9763c1bcb4..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/metadata.rb
+++ /dev/null
@@ -1,25 +0,0 @@
-name 'prometheus'
-maintainer 'Elijah Wright'
-maintainer_email 'elijah.wright@gmail.com'
-license 'Apache-2.0'
-description 'Installs/Configures Prometheus'
-long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
-version '0.6.5'
-source_url 'https://github.com/elijah/chef-prometheus'
-issues_url 'https://github.com/elijah/chef-prometheus/issues'
-chef_version '>= 12.15.25', '< 14' if respond_to?(:chef_version)
-
-%w(ubuntu debian centos redhat fedora).each do |os|
- supports os
-end
-
-depends 'apt'
-depends 'yum'
-depends 'build-essential'
-depends 'runit', '>= 1.5'
-depends 'ark'
-depends 'golang'
-depends 'compat_resource'
-
-# this should really be a 'suggests' rather than a hard dependency
-depends 'bluepill', '~> 2.3'
diff --git a/lc-gdn-chef/cookbooks/prometheus/recipes/alertmanager.rb b/lc-gdn-chef/cookbooks/prometheus/recipes/alertmanager.rb
deleted file mode 100644
index 11fae346b8762eafc1cb795d4752b17756058325..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/recipes/alertmanager.rb
+++ /dev/null
@@ -1,141 +0,0 @@
-#
-# Cookbook Name:: prometheus
-# Recipe:: alertmanager
-#
-# Author: Paul Magrath
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-include_recipe 'build-essential::default'
-
-user node['prometheus']['user'] do
- system true
- shell '/bin/false'
- home node['prometheus']['dir']
- not_if { node['prometheus']['use_existing_user'] == true || node['prometheus']['user'] == 'root' }
-end
-
-directory node['prometheus']['dir'] do
- owner node['prometheus']['user']
- group node['prometheus']['group']
- mode '0755'
- recursive true
-end
-
-directory node['prometheus']['log_dir'] do
- owner node['prometheus']['user']
- group node['prometheus']['group']
- mode '0755'
- recursive true
-end
-
-directory node['prometheus']['alertmanager']['storage.path'] do
- owner node['prometheus']['user']
- group node['prometheus']['group']
- mode '0755'
- recursive true
-end
-
-# -- Write our Config -- #
-
-template node['prometheus']['alertmanager']['config.file'] do
- cookbook node['prometheus']['alertmanager']['config_cookbook_name']
- source node['prometheus']['alertmanager']['config_template_name']
- mode '0644'
- owner node['prometheus']['user']
- group node['prometheus']['group']
- variables(
- notification_config: node['prometheus']['alertmanager']['notification']
- )
- notifies :restart, 'service[alertmanager]'
-end
-
-# -- Do the install -- #
-
-include_recipe "prometheus::alertmanager_#{node['prometheus']['alertmanager']['install_method']}"
-
-case node['prometheus']['init_style']
-when 'runit'
- include_recipe 'runit::default'
-
- runit_service 'alertmanager' do
- default_logger true
- end
-when 'bluepill'
- include_recipe 'bluepill::default'
-
- template "#{node['bluepill']['conf_dir']}/alertmanager.pill" do
- source 'alertmanager.pill.erb'
- mode '0644'
- end
-
- bluepill_service 'alertmanager' do
- action [:enable, :load]
- end
-when 'systemd'
- # rubocop:disable Style/HashSyntax
- dist_dir, conf_dir, env_file = value_for_platform_family(
- ['fedora'] => %w(fedora sysconfig alertmanager),
- ['rhel'] => %w(redhat sysconfig alertmanager),
- ['debian'] => %w(debian default alertmanager)
- )
-
- template '/etc/systemd/system/alertmanager.service' do
- source 'systemd/alertmanager.service.erb'
- mode '0644'
- variables(:sysconfig_file => "/etc/#{conf_dir}/#{env_file}")
- notifies :restart, 'service[alertmanager]', :delayed
- end
-
- template "/etc/#{conf_dir}/#{env_file}" do
- source "#{dist_dir}/#{conf_dir}/alertmanager.erb"
- mode '0644'
- notifies :restart, 'service[alertmanager]', :delayed
- end
-
- service 'alertmanager' do
- supports :status => true, :restart => true
- action [:enable, :start]
- end
- # rubocop:enable Style/HashSyntax
-when 'upstart'
- template '/etc/init/alertmanager.conf' do
- source 'upstart/alertmanager.service.erb'
- mode '0644'
- notifies :restart, 'service[alertmanager]', :delayed
- end
-
- service 'alertmanager' do
- provider Chef::Provider::Service::Upstart
- action [:enable, :start]
- end
-else
- template '/etc/init.d/alertmanager' do
- source 'alertmanager.erb'
- owner 'root'
- group node['root_group']
- mode '0755'
- notifies :restart, 'service[alertmanager]', :delayed
- end
-end
-
-# rubocop:disable Style/HashSyntax
-service 'alertmanager' do
- supports :status => true, :restart => true, :reload => true
-end
-# rubocop:enable Style/HashSyntax
-
-service 'alertmanager' do
- action [:enable, :start]
-end
diff --git a/lc-gdn-chef/cookbooks/prometheus/recipes/alertmanager_binary.rb b/lc-gdn-chef/cookbooks/prometheus/recipes/alertmanager_binary.rb
deleted file mode 100644
index 26cc2b665e221a3b36f3de03da02e857aa0757eb..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/recipes/alertmanager_binary.rb
+++ /dev/null
@@ -1,39 +0,0 @@
-#
-# Cookbook Name:: prometheus
-# Recipe:: alertmanager_binary
-#
-# Author: Javier Zunzunegui
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-include_recipe 'ark::default'
-
-%w(curl tar bzip2).each do |pkg|
- package pkg
-end
-
-dir_name = ::File.basename(node['prometheus']['dir'])
-dir_path = ::File.dirname(node['prometheus']['dir'])
-
-ark dir_name do
- url node['prometheus']['alertmanager']['binary_url']
- checksum node['prometheus']['alertmanager']['checksum']
- version node['prometheus']['alertmanager']['version']
- prefix_root Chef::Config['file_cache_path']
- path dir_path
- owner node['prometheus']['user']
- group node['prometheus']['group']
- extension node['prometheus']['alertmanager']['file_extension'] unless node['prometheus']['alertmanager']['file_extension'].empty?
- action :put
-end
diff --git a/lc-gdn-chef/cookbooks/prometheus/recipes/alertmanager_source.rb b/lc-gdn-chef/cookbooks/prometheus/recipes/alertmanager_source.rb
deleted file mode 100644
index 8d9e4545d2f6a03eca90a0ca30ef259ca15bd904..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/recipes/alertmanager_source.rb
+++ /dev/null
@@ -1,36 +0,0 @@
-#
-# Cookbook Name:: prometheus
-# Recipe:: alertmanager_source
-#
-# Author: Javier Zunzunegui
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# These packages are needed to build with Go
-%w(curl git-core mercurial gzip sed).each do |pkg|
- package pkg
-end
-
-git "#{Chef::Config[:file_cache_path]}/alertmanager-#{node['prometheus']['alertmanager']['version']}" do
- repository node['prometheus']['alertmanager']['git_repository']
- revision node['prometheus']['alertmanager']['git_revision']
- action :checkout
-end
-
-bash 'compile_alertmanager_source' do
- cwd "#{Chef::Config[:file_cache_path]}/alertmanager-#{node['prometheus']['alertmanager']['version']}"
- code "make && mv alertmanager #{node['prometheus']['dir']}"
-
- notifies :restart, 'service[alertmanager]'
-end
diff --git a/lc-gdn-chef/cookbooks/prometheus/recipes/binary.rb b/lc-gdn-chef/cookbooks/prometheus/recipes/binary.rb
deleted file mode 100644
index 1ee606a843e240205f0758572aae7c5e3974c1f0..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/recipes/binary.rb
+++ /dev/null
@@ -1,39 +0,0 @@
-#
-# Cookbook Name:: prometheus
-# Recipe:: binary
-#
-# Author: Kristian Jarvenpaa
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-include_recipe 'ark::default'
-
-%w(curl tar bzip2).each do |pkg|
- package pkg
-end
-
-dir_name = ::File.basename(node['prometheus']['dir'])
-dir_path = ::File.dirname(node['prometheus']['dir'])
-
-ark dir_name do
- url node['prometheus']['binary_url']
- checksum node['prometheus']['checksum']
- version node['prometheus']['version']
- prefix_root Chef::Config['file_cache_path']
- path dir_path
- owner node['prometheus']['user']
- group node['prometheus']['group']
- extension node['prometheus']['file_extension'] unless node['prometheus']['file_extension'].empty?
- action :put
-end
diff --git a/lc-gdn-chef/cookbooks/prometheus/recipes/default.rb b/lc-gdn-chef/cookbooks/prometheus/recipes/default.rb
deleted file mode 100644
index 5ef9dd2b23b6a7d0a2c4d3d73d358e024c021265..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/recipes/default.rb
+++ /dev/null
@@ -1,75 +0,0 @@
-#
-# Cookbook Name:: prometheus
-# Recipe:: default
-#
-# Author: Ray Rodriguez
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-user node['prometheus']['user'] do
- system true
- shell '/bin/false'
- home node['prometheus']['dir']
- not_if { node['prometheus']['use_existing_user'] == true || node['prometheus']['user'] == 'root' }
-end
-
-directory node['prometheus']['dir'] do
- owner node['prometheus']['user']
- group node['prometheus']['group']
- mode '0755'
- recursive true
-end
-
-directory node['prometheus']['log_dir'] do
- owner node['prometheus']['user']
- group node['prometheus']['group']
- mode '0755'
- recursive true
-end
-
-directory node['prometheus']['flags']['storage.local.path'] do
- owner node['prometheus']['user']
- group node['prometheus']['group']
- mode '0755'
- recursive true
-end
-
-apt_update 'please'
-# Ensure that any unpacking of prometheus doesn't blow away our own configuration
-include_recipe "prometheus::#{node['prometheus']['install_method']}"
-
-# -- Write our Config -- #
-
-template node['prometheus']['flags']['config.file'] do
- action :nothing
- cookbook node['prometheus']['job_config_cookbook_name']
- source node['prometheus']['job_config_template_name']
- mode '0644'
- owner node['prometheus']['user']
- group node['prometheus']['group']
- variables(
- rule_filenames: node['prometheus']['rule_filenames']
- )
- notifies :reload, 'service[prometheus]'
-end
-
-# monitor our server instance
-prometheus_job 'prometheus' do
- scrape_interval '15s'
- target "localhost#{node['prometheus']['flags']['web.listen-address']}"
- metrics_path node['prometheus']['flags']['web.telemetry-path']
-end
-
-# -- Do the install -- #
-
-include_recipe 'prometheus::service'
diff --git a/lc-gdn-chef/cookbooks/prometheus/recipes/service.rb b/lc-gdn-chef/cookbooks/prometheus/recipes/service.rb
deleted file mode 100644
index 7ff84e89ea0cdc3ee6705d56de36e590d7d66558..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/recipes/service.rb
+++ /dev/null
@@ -1,87 +0,0 @@
-#
-# Cookbook Name:: prometheus
-# Recipe:: service
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-case node['prometheus']['init_style']
-when 'runit'
- include_recipe 'runit::default'
-
- runit_service 'prometheus' do
- default_logger true
- end
-when 'bluepill'
- include_recipe 'bluepill::default'
-
- template "#{node['bluepill']['conf_dir']}/prometheus.pill" do
- source 'prometheus.pill.erb'
- mode '0644'
- end
-
- bluepill_service 'prometheus' do
- action [:enable, :load]
- end
-when 'systemd'
- # rubocop:disable Style/HashSyntax
- dist_dir, conf_dir, env_file = value_for_platform_family(
- ['fedora'] => %w(fedora sysconfig prometheus),
- ['rhel'] => %w(redhat sysconfig prometheus),
- ['debian'] => %w(debian default prometheus)
- )
-
- template '/etc/systemd/system/prometheus.service' do
- source 'systemd/prometheus.service.erb'
- mode '0644'
- variables(:sysconfig_file => "/etc/#{conf_dir}/#{env_file}")
- notifies :restart, 'service[prometheus]', :delayed
- end
-
- template "/etc/#{conf_dir}/#{env_file}" do
- source "#{dist_dir}/#{conf_dir}/prometheus.erb"
- mode '0644'
- notifies :restart, 'service[prometheus]', :delayed
- end
-
- service 'prometheus' do
- supports :status => true, :restart => true
- action [:enable, :start]
- end
- # rubocop:enable Style/HashSyntax
-when 'upstart'
- template '/etc/init/prometheus.conf' do
- source 'upstart/prometheus.service.erb'
- mode '0644'
- notifies :restart, 'service[prometheus]', :delayed
- end
-
- service 'prometheus' do
- provider Chef::Provider::Service::Upstart
- action [:enable, :start]
- end
-else
- template '/etc/init.d/prometheus' do
- source 'prometheus.erb'
- owner 'root'
- group node['root_group']
- mode '0755'
- notifies :restart, 'service[prometheus]', :delayed
- end
-end
-
-# rubocop:disable Style/HashSyntax
-service 'prometheus' do
- supports :status => true, :restart => true, :reload => true
-end
-# rubocop:enable Style/HashSyntax
diff --git a/lc-gdn-chef/cookbooks/prometheus/recipes/shell_binary.rb b/lc-gdn-chef/cookbooks/prometheus/recipes/shell_binary.rb
deleted file mode 100644
index cceaa9a22a80caebf62591a250f9d2987d62885f..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/recipes/shell_binary.rb
+++ /dev/null
@@ -1,40 +0,0 @@
-#
-# Cookbook Name:: prometheus
-# Recipe:: shell_binary
-#
-# Author: Rohit Gupta - @rohit01
-#
-# This recipe is similar to binary.rb, but without the 'ark' dependency
-#
-
-%w(curl tar bzip2).each do |pkg|
- package pkg
-end
-
-bash 'download_prometheus' do
- code <<-EOH
- pfilename="#{Chef::Config[:file_cache_path]}/prometheus-#{node['prometheus']['version']}.tar.gz"
- curl -L -o "${pfilename}" "#{node['prometheus']['binary_url']}"
- chksum="$(shasum -b -a 256 ${pfilename} | awk '{print $1}')"
- if [ "${chksum}" != "#{node['prometheus']['checksum']}" ]; then
- echo "ERROR: Downloaded file checksum mismatch. Aborting!"
- exit 1
- fi
- EOH
- user 'root'
- group 'root'
- creates "#{Chef::Config[:file_cache_path]}/prometheus-#{node['prometheus']['version']}.tar.gz"
- action :run
- notifies :run, 'bash[install_prometheus]', :immediately
-end
-
-bash 'install_prometheus' do
- code <<-EOH
- mkdir -p "#{node['prometheus']['dir']}"
- tar -xzf "#{Chef::Config[:file_cache_path]}/prometheus-#{node['prometheus']['version']}.tar.gz" -C "#{node['prometheus']['dir']}" --strip-components=1
- EOH
- user 'root'
- group 'root'
- action :nothing
- notifies :restart, 'service[prometheus]'
-end
diff --git a/lc-gdn-chef/cookbooks/prometheus/recipes/source.rb b/lc-gdn-chef/cookbooks/prometheus/recipes/source.rb
deleted file mode 100644
index c6f8a021f1c3a00c96adc5d855f8113fea481d3a..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/recipes/source.rb
+++ /dev/null
@@ -1,45 +0,0 @@
-#
-# Cookbook Name:: prometheus
-# Recipe:: source
-#
-# Author: Ray Rodriguez
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-include_recipe 'build-essential::default'
-include_recipe 'golang::default'
-
-# These packages are needed to build with Go
-%w(curl git-core mercurial gzip sed).each do |pkg|
- package pkg
-end
-
-git "#{Chef::Config[:file_cache_path]}/prometheus-#{node['prometheus']['version']}" do
- repository node['prometheus']['source']['git_repository']
- revision node['prometheus']['source']['git_revision']
- action :checkout
-end
-
-bash 'compile_prometheus_source' do
- cwd "#{Chef::Config[:file_cache_path]}/prometheus-#{node['prometheus']['version']}"
- environment 'PATH' => "/usr/local/go/bin:#{ENV['PATH']}", 'GOPATH' => "/opt/go:#{node['go']['gopath']}:/opt/go/src/github.com/prometheus/promu/vendor"
- code <<-EOH
- make build &&
- mv prometheus #{node['prometheus']['dir']} &&
- cp -R console_libraries #{node['prometheus']['dir']} &&
- cp -R consoles #{node['prometheus']['dir']}
- EOH
-
- notifies :restart, 'service[prometheus]'
-end
diff --git a/lc-gdn-chef/cookbooks/prometheus/recipes/use_lwrp.rb b/lc-gdn-chef/cookbooks/prometheus/recipes/use_lwrp.rb
deleted file mode 100644
index 101d5337288c1e33158264e21d71380ca5094e22..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/recipes/use_lwrp.rb
+++ /dev/null
@@ -1,30 +0,0 @@
-#
-# Cookbook Name:: prometheus
-# Recipe:: use_lwrp
-#
-# Author: Robert Berger
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# This recipe does nothing; it allows you to `include_recipe "prometheus::use_lwrp"`
-# so you can override attributes and use the LWRP(s) in a wrapper cookbook.
-# Then `include_recipe "prometheus::default"` to actually install and configure prometheus.
-
-# New workflow:
-# - Create a wrapper cookbook that:
-#   - `include_recipe "prometheus::use_lwrp"`
-#   - Collects any jobs / targets
-#   - Uses prometheus_job to create the jobs
-#   - Overrides any needed attributes
-#   - `include_recipe "prometheus::default"`
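A minimal sketch of the wrapper-cookbook recipe described in the comments above; the cookbook name, job name, and scrape targets are illustrative only:

# Hypothetical wrapper recipe, e.g. my_monitoring/recipes/prometheus.rb
include_recipe 'prometheus::use_lwrp'

# Declare the jobs/targets this node should scrape.
prometheus_job 'node_exporter' do
  scrape_interval '30s'
  target %w(node01.example.org:9100 node02.example.org:9100)
  labels('env' => 'production')
end

# Override any attributes as needed, then install and configure Prometheus.
node.override['prometheus']['install_method'] = 'binary'
include_recipe 'prometheus::default'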
diff --git a/lc-gdn-chef/cookbooks/prometheus/resources/job.rb b/lc-gdn-chef/cookbooks/prometheus/resources/job.rb
deleted file mode 100644
index d9d47aa3f24ff714ef2eb75b479e4511a2a00392..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/resources/job.rb
+++ /dev/null
@@ -1,33 +0,0 @@
-property :scrape_interval, String
-property :scrape_timeout, String
-property :labels, Hash
-property :target, [Array, String]
-property :metrics_path, String, default: '/metrics'
-property :config_file, String, default: lazy { node['prometheus']['flags']['config.file'] }
-
-default_action :create
-
-action :create do
- with_run_context :root do
- edit_resource(:template, config_file) do |new_resource|
- variables[:jobs] ||= {}
- variables[:jobs][new_resource.name] ||= {}
- variables[:jobs][new_resource.name]['scrape_interval'] = new_resource.scrape_interval
- variables[:jobs][new_resource.name]['scrape_timeout'] = new_resource.scrape_timeout
- variables[:jobs][new_resource.name]['target'] = new_resource.target
- variables[:jobs][new_resource.name]['metrics_path'] = new_resource.metrics_path
- variables[:jobs][new_resource.name]['labels'] = new_resource.labels
-
- action :nothing
- delayed_action :create
-
- not_if { node['prometheus']['allow_external_config'] }
- end
- end
-end
-
-action :delete do
- template config_file do
- action :delete
- end
-end
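The resource above works by editing the shared prometheus.yml template resource in the root run context and re-queueing it as a delayed :create, so jobs declared anywhere in the run list end up in a single rendered file, unless node['prometheus']['allow_external_config'] is set, in which case the template edit is skipped so the file can be managed elsewhere. A brief hypothetical usage with an illustrative probe endpoint:

# Scrape a hypothetical blackbox exporter with a custom path and timeout.
prometheus_job 'blackbox' do
  scrape_interval '60s'
  scrape_timeout  '10s'
  metrics_path    '/probe'
  target          'blackbox.example.org:9115'
  labels('module' => 'http_2xx')
end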
diff --git a/lc-gdn-chef/cookbooks/prometheus/spec/spec_helper.rb b/lc-gdn-chef/cookbooks/prometheus/spec/spec_helper.rb
deleted file mode 100644
index b640ebaa930a92161ed4a15d5fddf28bcd2c71b4..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/spec/spec_helper.rb
+++ /dev/null
@@ -1,10 +0,0 @@
-require 'chefspec'
-require 'chefspec/berkshelf'
-require_relative 'support/matchers'
-
-RSpec.configure do |config|
- # No WARN messages during testing
- config.log_level = :error
-end
-
-ChefSpec::Coverage.start!
diff --git a/lc-gdn-chef/cookbooks/prometheus/spec/support/matchers.rb b/lc-gdn-chef/cookbooks/prometheus/spec/support/matchers.rb
deleted file mode 100644
index 22df5a768de710fb08bd9553714d018536fc38c4..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/spec/support/matchers.rb
+++ /dev/null
@@ -1,4 +0,0 @@
-
-def put_ark(resource_name)
- ChefSpec::Matchers::ResourceMatcher.new(:ark, :put, resource_name)
-end
diff --git a/lc-gdn-chef/cookbooks/prometheus/spec/unit/recipes/alertmanager_spec.rb b/lc-gdn-chef/cookbooks/prometheus/spec/unit/recipes/alertmanager_spec.rb
deleted file mode 100644
index 9063c0e3f7275c5750fc61214f8ac82e9bcde8cc..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/spec/unit/recipes/alertmanager_spec.rb
+++ /dev/null
@@ -1,303 +0,0 @@
-#
-# Filename:: alertmanager_spec.rb
-# Description:: Verifies alertmanager recipe(s).
-#
-# Author: Elijah Caine
-#
-
-require 'spec_helper'
-
-# Caution: This is a carbon-copy of default_spec.rb with some variable replacements.
-
-describe 'prometheus::alertmanager' do
- let(:chef_run) do
- ChefSpec::SoloRunner.new(
- platform: 'ubuntu',
- version: '16.04',
- file_cache_path: '/tmp/chef/cache'
- ).converge(described_recipe)
- end
-
- before do
- stub_command('/usr/local/go/bin/go version | grep "go1.5 "').and_return(0)
- end
-
- it 'creates a user with correct attributes' do
- expect(chef_run).to create_user('prometheus').with(
- system: true,
- shell: '/bin/false',
- home: '/opt/prometheus'
- )
- end
-
- it 'creates a directory at /opt/prometheus' do
- expect(chef_run).to create_directory('/opt/prometheus').with(
- owner: 'prometheus',
- group: 'prometheus',
- mode: '0755',
- recursive: true
- )
- end
-
- it 'creates a directory at /var/log/prometheus' do
- expect(chef_run).to create_directory('/var/log/prometheus').with(
- owner: 'prometheus',
- group: 'prometheus',
- mode: '0755',
- recursive: true
- )
- end
-
- it 'renders the alertmanager configuration file and notifies alertmanager to restart' do
- resource = chef_run.template('/opt/prometheus/alertmanager.yml')
- expect(resource).to notify('service[alertmanager]').to(:restart)
- end
-
- # Test for source.rb
-
- context 'source' do
- let(:chef_run) do
- ChefSpec::SoloRunner.new(platform: 'ubuntu', version: '16.04', file_cache_path: '/var/chef/cache') do |node|
- node.set['prometheus']['alertmanager']['version'] = '0.14.0'
- node.set['prometheus']['alertmanager']['install_method'] = 'source'
- end.converge(described_recipe)
- end
-
- it 'includes build-essential' do
- expect(chef_run).to include_recipe('build-essential::default')
- end
-
- %w(curl git-core mercurial gzip sed).each do |pkg|
- it "installs #{pkg}" do
- expect(chef_run).to install_package(pkg)
- end
- end
-
- it 'checks out alertmanager from github' do
- expect(chef_run).to checkout_git("#{Chef::Config[:file_cache_path]}/alertmanager-0.14.0").with(
- repository: 'https://github.com/prometheus/alertmanager.git',
- revision: 'v0.14.0'
- )
- end
-
- it 'compiles alertmanager source' do
- expect(chef_run).to run_bash('compile_alertmanager_source')
- end
-
- it 'notifies alertmanager to restart' do
- resource = chef_run.bash('compile_alertmanager_source')
- expect(resource).to notify('service[alertmanager]').to(:restart)
- end
-
- context 'runit' do
- let(:chef_run) do
- ChefSpec::SoloRunner.new(platform: 'ubuntu', version: '16.04', file_cache_path: '/var/chef/cache') do |node|
- node.set['prometheus']['init_style'] = 'runit'
- end.converge(described_recipe)
- end
-
- it 'includes runit::default recipe' do
- expect(chef_run).to include_recipe('runit::default')
- end
-
- it 'enables runit_service' do
- expect(chef_run).to enable_runit_service('alertmanager')
- end
- end
-
- context 'bluepill' do
- let(:chef_run) do
- ChefSpec::SoloRunner.new(platform: 'ubuntu', version: '16.04', file_cache_path: '/var/chef/cache') do |node|
- node.set['prometheus']['init_style'] = 'bluepill'
- end.converge(described_recipe)
- end
-
- it 'includes bluepill::default recipe' do
- expect(chef_run).to include_recipe('bluepill::default')
- end
-
- it 'renders a bluepill configuration file' do
- expect(chef_run).to render_file("#{chef_run.node['bluepill']['conf_dir']}/alertmanager.pill")
- end
- end
-
- context 'init' do
- let(:chef_run) do
- ChefSpec::SoloRunner.new(platform: 'ubuntu', version: '16.04', file_cache_path: '/var/chef/cache') do |node|
- node.set['prometheus']['init_style'] = 'init'
- end.converge(described_recipe)
- end
-
- it 'renders an init.d configuration file' do
- expect(chef_run).to render_file('/etc/init.d/alertmanager')
- end
- end
-
- context 'systemd' do
- unit_file = '/etc/systemd/system/alertmanager.service'
-
- let(:chef_run) do
- ChefSpec::SoloRunner.new(platform: 'ubuntu', version: '16.04', file_cache_path: '/var/chef/cache') do |node|
- node.set['prometheus']['init_style'] = 'systemd'
- node.set['prometheus']['user'] = 'prom_user'
- node.set['prometheus']['group'] = 'prom_group'
- node.set['prometheus']['alertmanager']['binary'] = '/tmp/alertmanager'
- node.set['prometheus']['alertmanager']['storage.path'] = '/tmp/alertmanager_data'
- node.set['prometheus']['alertmanager']['config.file'] = '/tmp/alertmanager.conf'
- node.set['prometheus']['flags']['alertmanager.url'] = 'http://0.0.0.0:8080'
- end.converge(described_recipe)
- end
-
- it 'renders a systemd service file' do
- expect(chef_run).to render_file(unit_file)
- end
-
- it 'renders systemd unit with custom variables' do
- expect(chef_run).to render_file(unit_file).with_content { |content|
- expect(content).to include('ExecStart=/tmp/alertmanager')
- expect(content).to include('-storage.path=/tmp/alertmanager_data \\')
- expect(content).to include('-config.file=/tmp/alertmanager.conf \\')
- expect(content).to include('-web.external-url=http://0.0.0.0:8080')
- expect(content).to include('User=prom_user')
- expect(content).to include('Group=prom_group')
- }
- end
- end
-
- context 'upstart' do
- job_file = '/etc/init/alertmanager.conf'
-
- let(:chef_run) do
- ChefSpec::SoloRunner.new(platform: 'ubuntu', version: '16.04', file_cache_path: '/var/chef/cache') do |node|
- node.set['prometheus']['init_style'] = 'upstart'
- node.set['prometheus']['user'] = 'prom_user'
- node.set['prometheus']['group'] = 'prom_group'
- node.set['prometheus']['alertmanager']['binary'] = '/tmp/alertmanager'
- node.set['prometheus']['alertmanager']['storage.path'] = '/tmp/alertmanager_data'
- node.set['prometheus']['alertmanager']['config.file'] = '/tmp/alertmanager.conf'
- node.set['prometheus']['flags']['alertmanager.url'] = 'http://0.0.0.0:8080'
- node.set['prometheus']['log_dir'] = '/tmp'
- end.converge(described_recipe)
- end
-
- it 'renders an upstart job configuration file' do
- expect(chef_run).to render_file(job_file)
- end
-
- it 'renders an upstart job configuration with custom variables' do
- expect(chef_run).to render_file(job_file).with_content { |content|
- expect(content).to include('setuid prom_user')
- expect(content).to include('setgid prom_group')
- expect(content).to include('exec >> "/tmp/alertmanager.log"')
- expect(content).to include('exec /tmp/alertmanager')
- expect(content).to include('-storage.path=/tmp/alertmanager_data')
- expect(content).to include('-config.file=/tmp/alertmanager.conf')
- expect(content).to include('-web.external-url=http://0.0.0.0:8080')
- }
- end
- end
- end
-
- # Test for binary.rb
-
- context 'binary' do
- let(:chef_run) do
- ChefSpec::SoloRunner.new(platform: 'ubuntu', version: '16.04', file_cache_path: '/var/chef/cache') do |node|
- node.set['prometheus']['alertmanager']['version'] = '0.14.0'
- node.set['prometheus']['alertmanager']['install_method'] = 'binary'
- end.converge(described_recipe)
- end
-
- it 'runs ark with correct attributes' do
- expect(chef_run).to put_ark('prometheus').with(
- url: 'https://github.com/prometheus/alertmanager/releases/download/v0.14.0/alertmanager-0.14.0.linux-amd64.tar.gz',
- checksum: 'caddbbbe3ef8545c6cefb32f9a11207ae18dcc788e8d0fb19659d88c58d14b37',
- version: '0.14.0',
- prefix_root: Chef::Config['file_cache_path'],
- path: '/opt',
- owner: 'prometheus',
- group: 'prometheus'
- )
- end
-
- it 'runs ark with given file_extension' do
- chef_run.node.set['prometheus']['alertmanager']['file_extension'] = 'tar.gz'
- chef_run.converge(described_recipe)
- expect(chef_run).to put_ark('prometheus').with(
- extension: 'tar.gz'
- )
- end
-
- context 'runit' do
- let(:chef_run) do
- ChefSpec::SoloRunner.new(platform: 'ubuntu', version: '16.04', file_cache_path: '/var/chef/cache') do |node|
- node.set['prometheus']['init_style'] = 'runit'
- node.set['prometheus']['alertmanager']['install_method'] = 'binary'
- end.converge(described_recipe)
- end
-
- it 'includes runit::default recipe' do
- expect(chef_run).to include_recipe('runit::default')
- end
-
- it 'enables runit_service' do
- expect(chef_run).to enable_runit_service('alertmanager')
- end
- end
-
- context 'bluepill' do
- let(:chef_run) do
- ChefSpec::SoloRunner.new(platform: 'ubuntu', version: '16.04', file_cache_path: '/var/chef/cache') do |node|
- node.set['prometheus']['init_style'] = 'bluepill'
- node.set['prometheus']['alertmanager']['install_method'] = 'binary'
- end.converge(described_recipe)
- end
-
- it 'includes bluepill::default recipe' do
- expect(chef_run).to include_recipe('bluepill::default')
- end
-
- it 'renders a bluepill configuration file' do
- expect(chef_run).to render_file("#{chef_run.node['bluepill']['conf_dir']}/alertmanager.pill")
- end
- end
-
- context 'init' do
- let(:chef_run) do
- ChefSpec::SoloRunner.new(platform: 'ubuntu', version: '16.04', file_cache_path: '/var/chef/cache') do |node|
- node.set['prometheus']['init_style'] = 'init'
- node.set['prometheus']['alertmanager']['install_method'] = 'binary'
- end.converge(described_recipe)
- end
-
- it 'renders an init.d configuration file' do
- expect(chef_run).to render_file('/etc/init.d/alertmanager')
- end
- end
-
- context 'systemd' do
- let(:chef_run) do
- ChefSpec::SoloRunner.new(platform: 'ubuntu', version: '16.04', file_cache_path: '/var/chef/cache') do |node|
- node.set['prometheus']['init_style'] = 'systemd'
- node.set['prometheus']['alertmanager']['install_method'] = 'binary'
- end.converge(described_recipe)
- end
-
- it 'renders a systemd service file' do
- expect(chef_run).to render_file('/etc/systemd/system/alertmanager.service')
- end
- end
- context 'upstart' do
- let(:chef_run) do
- ChefSpec::SoloRunner.new(platform: 'ubuntu', version: '16.04', file_cache_path: '/var/chef/cache') do |node|
- node.set['prometheus']['init_style'] = 'upstart'
- node.set['prometheus']['alertmanager']['install_method'] = 'binary'
- end.converge(described_recipe)
- end
-
- it 'renders an upstart job configuration file' do
- expect(chef_run).to render_file('/etc/init/alertmanager.conf')
- end
- end
- end
-end
diff --git a/lc-gdn-chef/cookbooks/prometheus/spec/unit/recipes/default_spec.rb b/lc-gdn-chef/cookbooks/prometheus/spec/unit/recipes/default_spec.rb
deleted file mode 100644
index aa83a3272711d07d9f7f9dabfe3bcac2d33bb98c..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/spec/unit/recipes/default_spec.rb
+++ /dev/null
@@ -1,261 +0,0 @@
-require 'spec_helper'
-
-describe 'prometheus::default' do
- let(:chef_run) do
- ChefSpec::SoloRunner.new(
- platform: 'ubuntu',
- version: '16.04',
- file_cache_path: '/tmp/chef/cache',
- step_into: ['prometheus_job']
- ).converge(described_recipe)
- end
-
- before do
- stub_command('/usr/local/go/bin/go version | grep "go1.5 "').and_return(0)
- end
-
- it 'creates a user with correct attributes' do
- expect(chef_run).to create_user('prometheus').with(
- system: true,
- shell: '/bin/false',
- home: '/opt/prometheus'
- )
- end
-
- it 'creates a directory at /opt/prometheus' do
- expect(chef_run).to create_directory('/opt/prometheus').with(
- owner: 'prometheus',
- group: 'prometheus',
- mode: '0755',
- recursive: true
- )
- end
-
- it 'creates a directory at /var/log/prometheus' do
- expect(chef_run).to create_directory('/var/log/prometheus').with(
- owner: 'prometheus',
- group: 'prometheus',
- mode: '0755',
- recursive: true
- )
- end
-
- it 'renders a prometheus job configuration file and notifies prometheus to reload' do
- resource = chef_run.template('/opt/prometheus/prometheus.yml')
- expect(resource).to notify('service[prometheus]').to(:reload)
- end
-
- it 'uses an attribute to select the prometheus.yml template' do
- chef_run.node.override['prometheus']['job_config_cookbook_name'] = 'other_cookbook'
- chef_run.converge(described_recipe)
- expect(chef_run).to create_template('/opt/prometheus/prometheus.yml').with_cookbook('other_cookbook')
- end
-
- # Test for source.rb
-
- context 'source' do
- let(:chef_run) do
- ChefSpec::SoloRunner.new(platform: 'ubuntu', version: '16.04', file_cache_path: '/var/chef/cache') do |node|
- node.set['prometheus']['version'] = '2.2.1'
- node.set['prometheus']['install_method'] = 'source'
- end.converge(described_recipe)
- end
-
- it 'includes build-essential' do
- expect(chef_run).to include_recipe('build-essential::default')
- end
-
- %w(curl git-core mercurial gzip sed).each do |pkg|
- it "installs #{pkg}" do
- expect(chef_run).to install_package(pkg)
- end
- end
-
- it 'checks out prometheus from github' do
- expect(chef_run).to checkout_git("#{Chef::Config[:file_cache_path]}/prometheus-2.2.1").with(
- repository: 'https://github.com/prometheus/prometheus.git',
- revision: 'v2.2.1'
- )
- end
-
- it 'compiles prometheus source' do
- expect(chef_run).to run_bash('compile_prometheus_source')
- end
-
- it 'notifies prometheus to restart' do
- resource = chef_run.bash('compile_prometheus_source')
- expect(resource).to notify('service[prometheus]').to(:restart)
- end
-
- context 'runit' do
- let(:chef_run) do
- ChefSpec::SoloRunner.new(platform: 'ubuntu', version: '16.04', file_cache_path: '/var/chef/cache') do |node|
- node.set['prometheus']['init_style'] = 'runit'
- end.converge(described_recipe)
- end
-
- it 'includes runit::default recipe' do
- expect(chef_run).to include_recipe('runit::default')
- end
-
- it 'enables runit_service' do
- expect(chef_run).to enable_runit_service('prometheus')
- end
- end
-
- context 'bluepill' do
- let(:chef_run) do
- ChefSpec::SoloRunner.new(platform: 'ubuntu', version: '16.04', file_cache_path: '/var/chef/cache') do |node|
- node.set['prometheus']['init_style'] = 'bluepill'
- end.converge(described_recipe)
- end
-
- it 'includes bluepill::default recipe' do
- expect(chef_run).to include_recipe('bluepill::default')
- end
-
- it 'renders a bluepill configuration file' do
- expect(chef_run).to render_file("#{chef_run.node['bluepill']['conf_dir']}/prometheus.pill")
- end
- end
-
- context 'init' do
- let(:chef_run) do
- ChefSpec::SoloRunner.new(platform: 'ubuntu', version: '16.04', file_cache_path: '/var/chef/cache') do |node|
- node.set['prometheus']['init_style'] = 'init'
- end.converge(described_recipe)
- end
-
- it 'renders an init.d configuration file' do
- expect(chef_run).to render_file('/etc/init.d/prometheus')
- end
- end
-
- context 'systemd' do
- let(:chef_run) do
- ChefSpec::SoloRunner.new(platform: 'ubuntu', version: '16.04', file_cache_path: '/var/chef/cache') do |node|
- node.set['prometheus']['init_style'] = 'systemd'
- end.converge(described_recipe)
- end
-
- it 'renders a systemd service file' do
- expect(chef_run).to render_file('/etc/systemd/system/prometheus.service')
- end
- end
-
- context 'upstart' do
- let(:chef_run) do
- ChefSpec::SoloRunner.new(platform: 'ubuntu', version: '16.04', file_cache_path: '/var/chef/cache') do |node|
- node.set['prometheus']['init_style'] = 'upstart'
- end.converge(described_recipe)
- end
-
- it 'renders an upstart job configuration file' do
- expect(chef_run).to render_file('/etc/init/prometheus.conf')
- end
- end
- end
-
- # Test for binary.rb
-
- context 'binary' do
- let(:chef_run) do
- ChefSpec::SoloRunner.new(platform: 'ubuntu', version: '16.04', file_cache_path: '/var/chef/cache') do |node|
- node.set['prometheus']['version'] = '2.2.1'
- node.set['prometheus']['install_method'] = 'binary'
- end.converge(described_recipe)
- end
-
- it 'runs ark with correct attributes' do
- expect(chef_run).to put_ark('prometheus').with(
- url: 'https://github.com/prometheus/prometheus/releases/download/v2.2.1/prometheus-2.2.1.linux-amd64.tar.gz',
- checksum: 'caddbbbe3ef8545c6cefb32f9a11207ae18dcc788e8d0fb19659d88c58d14b37',
- version: '2.2.1',
- prefix_root: Chef::Config['file_cache_path'],
- path: '/opt',
- owner: 'prometheus',
- group: 'prometheus'
- )
- end
-
- it 'runs ark with given file_extension' do
- chef_run.node.set['prometheus']['file_extension'] = 'tar.gz'
- chef_run.converge(described_recipe)
- expect(chef_run).to put_ark('prometheus').with(
- extension: 'tar.gz'
- )
- end
-
- context 'runit' do
- let(:chef_run) do
- ChefSpec::SoloRunner.new(platform: 'ubuntu', version: '16.04', file_cache_path: '/var/chef/cache') do |node|
- node.set['prometheus']['init_style'] = 'runit'
- node.set['prometheus']['install_method'] = 'binary'
- end.converge(described_recipe)
- end
-
- it 'includes runit::default recipe' do
- expect(chef_run).to include_recipe('runit::default')
- end
-
- it 'enables runit_service' do
- expect(chef_run).to enable_runit_service('prometheus')
- end
- end
-
- context 'bluepill' do
- let(:chef_run) do
- ChefSpec::SoloRunner.new(platform: 'ubuntu', version: '16.04', file_cache_path: '/var/chef/cache') do |node|
- node.set['prometheus']['init_style'] = 'bluepill'
- node.set['prometheus']['install_method'] = 'binary'
- end.converge(described_recipe)
- end
-
- it 'includes bluepill::default recipe' do
- expect(chef_run).to include_recipe('bluepill::default')
- end
-
- it 'renders a bluepill configuration file' do
- expect(chef_run).to render_file("#{chef_run.node['bluepill']['conf_dir']}/prometheus.pill")
- end
- end
-
- context 'init' do
- let(:chef_run) do
- ChefSpec::SoloRunner.new(platform: 'ubuntu', version: '16.04', file_cache_path: '/var/chef/cache') do |node|
- node.set['prometheus']['init_style'] = 'init'
- node.set['prometheus']['install_method'] = 'binary'
- end.converge(described_recipe)
- end
-
- it 'renders an init.d configuration file' do
- expect(chef_run).to render_file('/etc/init.d/prometheus')
- end
- end
-
- context 'systemd' do
- let(:chef_run) do
- ChefSpec::SoloRunner.new(platform: 'ubuntu', version: '16.04', file_cache_path: '/var/chef/cache') do |node|
- node.set['prometheus']['init_style'] = 'systemd'
- node.set['prometheus']['install_method'] = 'binary'
- end.converge(described_recipe)
- end
-
- it 'renders a systemd service file' do
- expect(chef_run).to render_file('/etc/systemd/system/prometheus.service')
- end
- end
- context 'upstart' do
- let(:chef_run) do
- ChefSpec::SoloRunner.new(platform: 'ubuntu', version: '16.04', file_cache_path: '/var/chef/cache') do |node|
- node.set['prometheus']['init_style'] = 'upstart'
- node.set['prometheus']['install_method'] = 'binary'
- end.converge(described_recipe)
- end
-
- it 'renders an upstart job configuration file' do
- expect(chef_run).to render_file('/etc/init/prometheus.conf')
- end
- end
- end
-end
diff --git a/lc-gdn-chef/cookbooks/prometheus/templates/centos/prometheus.erb b/lc-gdn-chef/cookbooks/prometheus/templates/centos/prometheus.erb
deleted file mode 100644
index 9e670e57aeec73dff636cabe9e8aac77d983fabe..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/templates/centos/prometheus.erb
+++ /dev/null
@@ -1,155 +0,0 @@
-#!/bin/sh
-# Init script for prometheus
-# Maintained by
-# Generated by pleaserun.
-# Implemented based on LSB Core 3.1:
-# * Sections: 20.2, 20.3
-#
-### BEGIN INIT INFO
-# Provides: prometheus
-# Required-Start: $remote_fs $syslog
-# Required-Stop: $remote_fs $syslog
-# Default-Start: 2 3 4 5
-# Default-Stop: 0 1 6
-# Short-Description:
-# Description: no description given
-### END INIT INFO
-
-PATH=/sbin:/usr/sbin:/bin:/usr/bin
-export PATH
-
-name=$(basename $0)
-program="<%= node['prometheus']['binary'] %>"
-args="<%= generate_flags %>"
-pidfile=<%= node['prometheus']['pid'] %>
-user=<%= node['prometheus']['source']['user'] %>
-group=<%= node['prometheus']['source']['group'] %>
-chroot="/"
-chdir="/"
-
-[ -r /etc/default/$name ] && . /etc/default/$name
-[ -r /etc/sysconfig/$name ] && . /etc/sysconfig/$name
-
-trace() {
- logger -t "/etc/init.d/prometheus" "$@"
-}
-
-emit() {
- trace "$@"
- echo "$@"
-}
-
-start() {
-
-
- # Setup any environmental stuff beforehand
-
-
- # Run the program!
-
- chroot --userspec "$user":"$group" "$chroot" sh -c "
-
- cd \"$chdir\"
- exec \"$program\" $args
- " >> <%= "#{node['prometheus']['log_dir']}/prometheus.log" %> 2>&1 &
-
- # Generate the pidfile from here. If we instead made the forked process
- # generate it, there would be a race condition between the pidfile being written
- # and a process possibly asking for status.
- echo $! > $pidfile
-
- emit "$name started"
- return 0
-}
-
-stop() {
- # Try a few times to kill the program with SIGTERM
- if status ; then
- pid=$(cat "$pidfile")
- trace "Killing $name (pid $pid) with SIGTERM"
- kill -TERM $pid
- # Wait for it to exit.
- for i in 1 2 3 4 5 ; do
- trace "Waiting for $name (pid $pid) to die..."
- status || break
- sleep 1
- done
- if status ; then
- emit "$name stop failed; still running."
- else
- emit "$name stopped."
- fi
- fi
-}
-
-status() {
- if [ -f "$pidfile" ] ; then
- pid=$(cat "$pidfile")
- if ps -p $pid > /dev/null 2> /dev/null ; then
- # process by this pid is running.
- # It may not be our pid, but that's what you get with just pidfiles.
- # TODO(sissel): Check if this process seems to be the same as the one we
- # expect. It'd be nice to use flock here, but flock uses fork, not exec,
- # so it makes it quite awkward to use in this case.
- return 0
- else
- return 2 # program is dead but pid file exists
- fi
- else
- return 3 # program is not running
- fi
-}
-
-force_stop() {
- if status ; then
- stop
- status && kill -KILL $(cat "$pidfile")
- fi
-}
-
-
-case "$1" in
- force-start|start|stop|force-stop|restart)
- trace "Attempting '$1' on prometheus"
- ;;
-esac
-
-case "$1" in
- force-start)
- PRESTART=no
- exec "$0" start
- ;;
- start)
- status
- code=$?
- if [ $code -eq 0 ]; then
- emit "$name is already running"
- exit $code
- else
- start
- exit $?
- fi
- ;;
- stop) stop ;;
- force-stop) force_stop ;;
- status)
- status
- code=$?
- if [ $code -eq 0 ] ; then
- emit "$name is running"
- else
- emit "$name is not running"
- fi
- exit $code
- ;;
- restart)
-
- stop && start
- ;;
- *)
- echo "Usage: $name {start|stop|force-start|force-stop|status|restart}" >&2
- exit 3
- ;;
-esac
-
-exit $?
diff --git a/lc-gdn-chef/cookbooks/prometheus/templates/debian/prometheus.erb b/lc-gdn-chef/cookbooks/prometheus/templates/debian/prometheus.erb
deleted file mode 100644
index 9e670e57aeec73dff636cabe9e8aac77d983fabe..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/templates/debian/prometheus.erb
+++ /dev/null
@@ -1,155 +0,0 @@
-#!/bin/sh
-# Init script for prometheus
-# Maintained by
-# Generated by pleaserun.
-# Implemented based on LSB Core 3.1:
-# * Sections: 20.2, 20.3
-#
-### BEGIN INIT INFO
-# Provides: prometheus
-# Required-Start: $remote_fs $syslog
-# Required-Stop: $remote_fs $syslog
-# Default-Start: 2 3 4 5
-# Default-Stop: 0 1 6
-# Short-Description:
-# Description: no description given
-### END INIT INFO
-
-PATH=/sbin:/usr/sbin:/bin:/usr/bin
-export PATH
-
-name=$(basename $0)
-program="<%= node['prometheus']['binary'] %>"
-args="<%= generate_flags %>"
-pidfile=<%= node['prometheus']['pid'] %>
-user=<%= node['prometheus']['source']['user'] %>
-group=<%= node['prometheus']['source']['group'] %>
-chroot="/"
-chdir="/"
-
-[ -r /etc/default/$name ] && . /etc/default/$name
-[ -r /etc/sysconfig/$name ] && . /etc/sysconfig/$name
-
-trace() {
- logger -t "/etc/init.d/prometheus" "$@"
-}
-
-emit() {
- trace "$@"
- echo "$@"
-}
-
-start() {
-
-
- # Setup any environmental stuff beforehand
-
-
- # Run the program!
-
- chroot --userspec "$user":"$group" "$chroot" sh -c "
-
- cd \"$chdir\"
- exec \"$program\" $args
- " >> <%= "#{node['prometheus']['log_dir']}/prometheus.log" %> 2>&1 &
-
- # Generate the pidfile from here. If we instead made the forked process
- # generate it, there would be a race condition between the pidfile being written
- # and a process possibly asking for status.
- echo $! > $pidfile
-
- emit "$name started"
- return 0
-}
-
-stop() {
- # Try a few times to kill the program with SIGTERM
- if status ; then
- pid=$(cat "$pidfile")
- trace "Killing $name (pid $pid) with SIGTERM"
- kill -TERM $pid
- # Wait for it to exit.
- for i in 1 2 3 4 5 ; do
- trace "Waiting for $name (pid $pid) to die..."
- status || break
- sleep 1
- done
- if status ; then
- emit "$name stop failed; still running."
- else
- emit "$name stopped."
- fi
- fi
-}
-
-status() {
- if [ -f "$pidfile" ] ; then
- pid=$(cat "$pidfile")
- if ps -p $pid > /dev/null 2> /dev/null ; then
- # process by this pid is running.
- # It may not be our pid, but that's what you get with just pidfiles.
- # TODO(sissel): Check if this process seems to be the same as the one we
- # expect. It'd be nice to use flock here, but flock uses fork, not exec,
- # so it makes it quite awkward to use in this case.
- return 0
- else
- return 2 # program is dead but pid file exists
- fi
- else
- return 3 # program is not running
- fi
-}
-
-force_stop() {
- if status ; then
- stop
- status && kill -KILL $(cat "$pidfile")
- fi
-}
-
-
-case "$1" in
- force-start|start|stop|force-stop|restart)
- trace "Attempting '$1' on prometheus"
- ;;
-esac
-
-case "$1" in
- force-start)
- PRESTART=no
- exec "$0" start
- ;;
- start)
- status
- code=$?
- if [ $code -eq 0 ]; then
- emit "$name is already running"
- exit $code
- else
- start
- exit $?
- fi
- ;;
- stop) stop ;;
- force-stop) force_stop ;;
- status)
- status
- code=$?
- if [ $code -eq 0 ] ; then
- emit "$name is running"
- else
- emit "$name is not running"
- fi
- exit $code
- ;;
- restart)
-
- stop && start
- ;;
- *)
- echo "Usage: $name {start|stop|force-start|force-stop|status|restart}" >&2
- exit 3
- ;;
-esac
-
-exit $?
diff --git a/lc-gdn-chef/cookbooks/prometheus/templates/default/alertmanager.init.erb b/lc-gdn-chef/cookbooks/prometheus/templates/default/alertmanager.init.erb
deleted file mode 100644
index 1bf205d3675b596b8c58154a83a6b49cf8f94743..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/templates/default/alertmanager.init.erb
+++ /dev/null
@@ -1,2 +0,0 @@
-# We should fill this in with a generic init file for alertmanager.
-#
diff --git a/lc-gdn-chef/cookbooks/prometheus/templates/default/alertmanager.pill.erb b/lc-gdn-chef/cookbooks/prometheus/templates/default/alertmanager.pill.erb
deleted file mode 100644
index 1fbde1f0a47746822dc46e99138bd9643696b69f..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/templates/default/alertmanager.pill.erb
+++ /dev/null
@@ -1,14 +0,0 @@
-
-Bluepill.application('alertmanager') do |app|
- app.process('alertmanager') do |process|
- process.pid_file = "<%= node['prometheus']['pid'] %>"
- process.working_dir = "<%= node['prometheus']['dir'] %>"
- process.start_command = "<%= node['prometheus']['alertmanager']['binary'] %> -log.level=debug \
- -storage.path=<%= node['prometheus']['dir'] %>/data \
- -config.file=<%= node['prometheus']['alertmanager']['config.file'] %> \
- -web.external-url=<%= node['prometheus']['flags']['alertmanager.url'] %>"
- process.environment = {'GOMAXPROCS' => <%= node['cpu']['total'] %>}
- process.stdout = process.stderr = "<%= node['prometheus']['log_dir'] %>/alertmanager.log"
- process.daemonize = true
- end
-end
diff --git a/lc-gdn-chef/cookbooks/prometheus/templates/default/alertmanager.yml.erb b/lc-gdn-chef/cookbooks/prometheus/templates/default/alertmanager.yml.erb
deleted file mode 100644
index 949efac1323051c96e931aee24ebdfbb89e3291c..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/templates/default/alertmanager.yml.erb
+++ /dev/null
@@ -1,115 +0,0 @@
-global:
- # The smarthost and SMTP sender used for mail notifications.
- smtp_smarthost: 'localhost:25'
- smtp_from: 'alertmanager@example.org'
-
-# The root route on which each incoming alert enters.
-route:
- # The root route must not have any matchers as it is the entry point for
- # all alerts. It needs to have a receiver configured so alerts that do not
- # match any of the sub-routes are sent to someone.
- receiver: 'team-X-mails'
-
- # The labels by which incoming alerts are grouped together. For example,
- # multiple alerts coming in for cluster=A and alertname=LatencyHigh would
- # be batched into a single group.
- group_by: ['alertname', 'cluster']
-
- # When a new group of alerts is created by an incoming alert, wait at
- # least 'group_wait' to send the initial notification.
- # This ensures that multiple alerts for the same group that start
- # firing shortly after one another are batched together in the first
- # notification.
- group_wait: 30s
-
- # Once the first notification has been sent, wait 'group_interval' to send a batch
- # of new alerts that started firing for that group.
- group_interval: 5m
-
- # If a notification has successfully been sent, wait 'repeat_interval' before
- # resending it.
- repeat_interval: 3h
-
- # All the above attributes are inherited by all child routes and can be
- # overwritten on each.
-
- # The child route trees.
- routes:
- # This route performs a regular expression match on alert labels to
- # catch alerts that are related to a list of services.
- - match_re:
- service: ^(foo1|foo2|baz)$
- receiver: team-X-mails
-
- # The service has a sub-route for critical alerts; any alerts
- # that do not match (i.e. severity != critical) fall back to the
- # parent node and are sent to 'team-X-mails'
- routes:
- - match:
- severity: critical
- receiver: team-X-pager
-
- - match:
- service: files
- receiver: team-Y-mails
-
- routes:
- - match:
- severity: critical
- receiver: team-Y-pager
-
- # This route handles all alerts coming from a database service. If there's
- # no team to handle it, it defaults to the DB team.
- - match:
- service: database
-
- receiver: team-DB-pager
- # Also group alerts by affected database.
- group_by: [alertname, cluster, database]
-
- routes:
- - match:
- owner: team-X
- receiver: team-X-pager
-
- - match:
- owner: team-Y
- receiver: team-Y-pager
-
-
-# Inhibition rules allow muting a set of alerts when another alert is
-# firing.
-# We use this to mute any warning-level notifications if the same alert is
-# already critical.
-inhibit_rules:
-- source_match:
- severity: 'critical'
- target_match:
- severity: 'warning'
- # Apply inhibition if the alertname is the same.
- equal: ['alertname']
-
-
-receivers:
-- name: 'team-X-mails'
- email_configs:
- - to: 'team-X+alerts@example.org'
-
-- name: 'team-X-pager'
- email_configs:
- - to: 'team-X+alerts-critical@example.org'
- pagerduty_configs:
- - service_key:
-
-- name: 'team-Y-mails'
- email_configs:
- - to: 'team-Y+alerts@example.org'
-
-- name: 'team-Y-pager'
- pagerduty_configs:
- - service_key:
-
-- name: 'team-DB-pager'
- pagerduty_configs:
- - service_key:
-
diff --git a/lc-gdn-chef/cookbooks/prometheus/templates/default/debian/default/alertmanager.erb b/lc-gdn-chef/cookbooks/prometheus/templates/default/debian/default/alertmanager.erb
deleted file mode 100644
index 27eca88db52dd6156738759edd02f8178ce6146a..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/templates/default/debian/default/alertmanager.erb
+++ /dev/null
@@ -1,3 +0,0 @@
-# Configuration file for the alertmanager service
-
-GOMAXPROCS=<%= node['cpu']['total'] %>
diff --git a/lc-gdn-chef/cookbooks/prometheus/templates/default/debian/default/prometheus.erb b/lc-gdn-chef/cookbooks/prometheus/templates/default/debian/default/prometheus.erb
deleted file mode 100644
index 523b1543e5f2b7a9c1735f25d633a3a1af03e503..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/templates/default/debian/default/prometheus.erb
+++ /dev/null
@@ -1,3 +0,0 @@
-# Configuration file for the prometheus service
-
-GOMAXPROCS=<%= node['cpu']['total'] %>
diff --git a/lc-gdn-chef/cookbooks/prometheus/templates/default/prometheus.pill.erb b/lc-gdn-chef/cookbooks/prometheus/templates/default/prometheus.pill.erb
deleted file mode 100644
index ad5d72d003145af54155d9a0c69665cf1b49184c..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/templates/default/prometheus.pill.erb
+++ /dev/null
@@ -1,11 +0,0 @@
-
-Bluepill.application('prometheus') do |app|
- app.process('prometheus') do |process|
- process.pid_file = "<%= node['prometheus']['pid'] %>"
- process.working_dir = "<%= node['prometheus']['dir'] %>"
- process.start_command = "<%= node['prometheus']['binary'] %> <%= generate_flags %>"
- process.environment = {'GOMAXPROCS' => <%= node['cpu']['total'] %>}
- process.stdout = process.stderr = "<%= node['prometheus']['log_dir'] %>/prometheus.log"
- process.daemonize = true
- end
-end
diff --git a/lc-gdn-chef/cookbooks/prometheus/templates/default/prometheus.yml.erb b/lc-gdn-chef/cookbooks/prometheus/templates/default/prometheus.yml.erb
deleted file mode 100644
index b42804b02c37758c039724860d226ea2d1b6e187..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/templates/default/prometheus.yml.erb
+++ /dev/null
@@ -1,34 +0,0 @@
-# Global default settings.
-global:
- <%node['prometheus']['global'].each do |k,v|%>
- <%=k%>: "<%=v%>"
- <%end%>
-
-scrape_configs:
-<% @jobs.each do |name, job| %>
-- job_name: "<%= name %>"
- <% if job['scrape_interval'] %>
- scrape_interval: "<%= job['scrape_interval'] %>"
- <% end %>
- <% if job['scrape_timeout'] %>
- scrape_timeout: "<%= job['scrape_timeout'] %>"
- <% end %>
- metrics_path: "<%= job['metrics_path'] %>"
- static_configs:
- - targets: <%= Array(job['target']) %>
- <%if job['labels'] %>
- labels:
- <% job['labels'].each do |label,label_config| %>
- <%=label%>: <%=label_config%>
- <%end%>
- <%end%>
-
- <% end %>
-
-<% if @rule_filenames %>
-rule_files:
-<% @rule_filenames.each do |filename| %>
- - <%= filename %>
-<% end %>
-<% end %>
-
diff --git a/lc-gdn-chef/cookbooks/prometheus/templates/default/redhat/sysconfig/alertmanager.erb b/lc-gdn-chef/cookbooks/prometheus/templates/default/redhat/sysconfig/alertmanager.erb
deleted file mode 100644
index 27eca88db52dd6156738759edd02f8178ce6146a..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/templates/default/redhat/sysconfig/alertmanager.erb
+++ /dev/null
@@ -1,3 +0,0 @@
-# Configuration file for the alertmanager service
-
-GOMAXPROCS=<%= node['cpu']['total'] %>
diff --git a/lc-gdn-chef/cookbooks/prometheus/templates/default/redhat/sysconfig/prometheus.erb b/lc-gdn-chef/cookbooks/prometheus/templates/default/redhat/sysconfig/prometheus.erb
deleted file mode 100644
index 523b1543e5f2b7a9c1735f25d633a3a1af03e503..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/templates/default/redhat/sysconfig/prometheus.erb
+++ /dev/null
@@ -1,3 +0,0 @@
-# Configuration file for the prometheus service
-
-GOMAXPROCS=<%= node['cpu']['total'] %>
diff --git a/lc-gdn-chef/cookbooks/prometheus/templates/default/sv-prometheus-run.erb b/lc-gdn-chef/cookbooks/prometheus/templates/default/sv-prometheus-run.erb
deleted file mode 100644
index 93f22fc1eac109362ce419ca13ed86f7ccef2b39..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/templates/default/sv-prometheus-run.erb
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/bin/sh
-
-exec 2>&1
-export GOMAXPROCS=<%= node['cpu']['total'] %>
-exec <%= node['prometheus']['binary'] %> <%= generate_flags %>
diff --git a/lc-gdn-chef/cookbooks/prometheus/templates/default/systemd/alertmanager.service.erb b/lc-gdn-chef/cookbooks/prometheus/templates/default/systemd/alertmanager.service.erb
deleted file mode 100644
index 8fa25fe257af8bd4306bc5031036165c0a1d7fde..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/templates/default/systemd/alertmanager.service.erb
+++ /dev/null
@@ -1,16 +0,0 @@
-[Unit]
-Description=Prometheus Alertmanager
-After=network.target
-
-[Service]
-ExecStart=<%= node['prometheus']['alertmanager']['binary'] %> \
- --log.level=debug \
- --storage.path=<%= node['prometheus']['alertmanager']['storage.path'] %> \
- --config.file=<%= node['prometheus']['alertmanager']['config.file'] %> \
- --web.external-url=<%= node['prometheus']['flags']['alertmanager.url'] %>
-User=<%= node['prometheus']['user'] %>
-Group=<%= node['prometheus']['group'] %>
-Restart=always
-
-[Install]
-WantedBy=multi-user.target
diff --git a/lc-gdn-chef/cookbooks/prometheus/templates/default/systemd/prometheus.service.erb b/lc-gdn-chef/cookbooks/prometheus/templates/default/systemd/prometheus.service.erb
deleted file mode 100644
index 43e7b9f46a45cf38f222492e3bf6fc27bb2267ef..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/templates/default/systemd/prometheus.service.erb
+++ /dev/null
@@ -1,15 +0,0 @@
-[Unit]
-Description=Prometheus
-After=network.target auditd.service
-
-[Service]
-Type=simple
-EnvironmentFile=<%= @sysconfig_file %>
-User=<%= node['prometheus']['user'] %>
-Group=<%= node['prometheus']['group'] %>
-ExecStart=<%= node['prometheus']['binary'] %> <%= generate_flags %>
-ExecReload=/bin/kill -HUP $MAINPID
-Restart=always
-
-[Install]
-WantedBy=multi-user.target
diff --git a/lc-gdn-chef/cookbooks/prometheus/templates/default/upstart/alertmanager.service.erb b/lc-gdn-chef/cookbooks/prometheus/templates/default/upstart/alertmanager.service.erb
deleted file mode 100644
index d64776706c0788bfeaa92785a56b91cdebbdb6fd..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/templates/default/upstart/alertmanager.service.erb
+++ /dev/null
@@ -1,14 +0,0 @@
-description "Prometheus Alertmanager"
-start on (local-filesystems and net-device-up IFACE!=lo)
-stop on runlevel [016]
-
-respawn
-env GOMAXPROCS=<%= node['cpu']['total'] %>
-setuid <%= node['prometheus']['user'] %>
-setgid <%= node['prometheus']['group'] %>
-
-script
- exec >> "<%= node['prometheus']['log_dir'] %>/alertmanager.log"
- exec 2>&1
- exec <%= node['prometheus']['alertmanager']['binary'] %> -config.file=<%= node['prometheus']['alertmanager']['config.file'] %> -storage.path=<%= node['prometheus']['alertmanager']['storage.path'] %> -web.external-url=<%= node['prometheus']['flags']['alertmanager.url'] %>
-end script
diff --git a/lc-gdn-chef/cookbooks/prometheus/templates/default/upstart/prometheus.service.erb b/lc-gdn-chef/cookbooks/prometheus/templates/default/upstart/prometheus.service.erb
deleted file mode 100644
index f8ee88f8184ef1ff77434e557f51a7c4e0ce1a42..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/templates/default/upstart/prometheus.service.erb
+++ /dev/null
@@ -1,15 +0,0 @@
-description "Prometheus"
-start on (local-filesystems and net-device-up IFACE!=lo)
-stop on runlevel [016]
-
-respawn
-env GOMAXPROCS=<%= node['cpu']['total'] %>
-setuid <%= node['prometheus']['user'] %>
-setgid <%= node['prometheus']['group'] %>
-
-script
- chdir <%= node['prometheus']['dir'] %>
- exec >> "<%= node['prometheus']['log_dir'] %>/prometheus.log"
- exec 2>&1
- exec <%= node['prometheus']['binary'] %> <%= generate_flags %>
-end script
diff --git a/lc-gdn-chef/cookbooks/prometheus/templates/redhat/prometheus.erb b/lc-gdn-chef/cookbooks/prometheus/templates/redhat/prometheus.erb
deleted file mode 100644
index 9e670e57aeec73dff636cabe9e8aac77d983fabe..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/templates/redhat/prometheus.erb
+++ /dev/null
@@ -1,155 +0,0 @@
-#!/bin/sh
-# Init script for prometheus
-# Maintained by
-# Generated by pleaserun.
-# Implemented based on LSB Core 3.1:
-# * Sections: 20.2, 20.3
-#
-### BEGIN INIT INFO
-# Provides: prometheus
-# Required-Start: $remote_fs $syslog
-# Required-Stop: $remote_fs $syslog
-# Default-Start: 2 3 4 5
-# Default-Stop: 0 1 6
-# Short-Description:
-# Description: no description given
-### END INIT INFO
-
-PATH=/sbin:/usr/sbin:/bin:/usr/bin
-export PATH
-
-name=$(basename $0)
-program="<%= node['prometheus']['binary'] %>"
-args="<%= generate_flags %>"
-pidfile=<%= node['prometheus']['pid'] %>
-user=<%= node['prometheus']['source']['user'] %>
-group=<%= node['prometheus']['source']['group'] %>
-chroot="/"
-chdir="/"
-
-[ -r /etc/default/$name ] && . /etc/default/$name
-[ -r /etc/sysconfig/$name ] && . /etc/sysconfig/$name
-
-trace() {
- logger -t "/etc/init.d/prometheus" "$@"
-}
-
-emit() {
- trace "$@"
- echo "$@"
-}
-
-start() {
-
-
- # Setup any environmental stuff beforehand
-
-
- # Run the program!
-
- chroot --userspec "$user":"$group" "$chroot" sh -c "
-
- cd \"$chdir\"
- exec \"$program\" $args
- " >> <%= "#{node['prometheus']['log_dir']}/prometheus.log" %> 2>&1 &
-
- # Generate the pidfile from here. If we instead made the forked process
- # generate it there will be a race condition between the pidfile writing
- # and a process possibly asking for status.
- echo $! > $pidfile
-
- emit "$name started"
- return 0
-}
-
-stop() {
- # Try a few times to kill TERM the program
- if status ; then
- pid=$(cat "$pidfile")
- trace "Killing $name (pid $pid) with SIGTERM"
- kill -TERM $pid
- # Wait for it to exit.
- for i in 1 2 3 4 5 ; do
- trace "Waiting $name (pid $pid) to die..."
- status || break
- sleep 1
- done
- if status ; then
- emit "$name stop failed; still running."
- else
- emit "$name stopped."
- fi
- fi
-}
-
-status() {
- if [ -f "$pidfile" ] ; then
- pid=$(cat "$pidfile")
- if ps -p $pid > /dev/null 2> /dev/null ; then
- # process by this pid is running.
- # It may not be our pid, but that's what you get with just pidfiles.
- # TODO(sissel): Check if this process seems to be the same as the one we
- # expect. It'd be nice to use flock here, but flock uses fork, not exec,
- # so it makes it quite awkward to use in this case.
- return 0
- else
- return 2 # program is dead but pid file exists
- fi
- else
- return 3 # program is not running
- fi
-}
-
-force_stop() {
- if status ; then
- stop
- status && kill -KILL $(cat "$pidfile")
- fi
-}
-
-
-case "$1" in
- force-start|start|stop|force-stop|restart)
- trace "Attempting '$1' on prometheus"
- ;;
-esac
-
-case "$1" in
- force-start)
- PRESTART=no
- exec "$0" start
- ;;
- start)
- status
- code=$?
- if [ $code -eq 0 ]; then
- emit "$name is already running"
- exit $code
- else
- start
- exit $?
- fi
- ;;
- stop) stop ;;
- force-stop) force_stop ;;
- status)
- status
- code=$?
- if [ $code -eq 0 ] ; then
- emit "$name is running"
- else
- emit "$name is not running"
- fi
- exit $code
- ;;
- restart)
-
- stop && start
- ;;
- *)
- echo "Usage: $SCRIPTNAME {start|stop|force-start|force-stop|status|restart}" >&2
- exit 3
- ;;
-esac
-
-exit $?
diff --git a/lc-gdn-chef/cookbooks/prometheus/templates/ubuntu/prometheus.erb b/lc-gdn-chef/cookbooks/prometheus/templates/ubuntu/prometheus.erb
deleted file mode 100644
index 9e670e57aeec73dff636cabe9e8aac77d983fabe..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/templates/ubuntu/prometheus.erb
+++ /dev/null
@@ -1,155 +0,0 @@
-#!/bin/sh
-# Init script for prometheus
-# Maintained by
-# Generated by pleaserun.
-# Implemented based on LSB Core 3.1:
-# * Sections: 20.2, 20.3
-#
-### BEGIN INIT INFO
-# Provides: prometheus
-# Required-Start: $remote_fs $syslog
-# Required-Stop: $remote_fs $syslog
-# Default-Start: 2 3 4 5
-# Default-Stop: 0 1 6
-# Short-Description:
-# Description: no description given
-### END INIT INFO
-
-PATH=/sbin:/usr/sbin:/bin:/usr/bin
-export PATH
-
-name=$(basename $0)
-program="<%= node['prometheus']['binary'] %>"
-args="<%= generate_flags %>"
-pidfile=<%= node['prometheus']['pid'] %>
-user=<%= node['prometheus']['source']['user'] %>
-group=<%= node['prometheus']['source']['group'] %>
-chroot="/"
-chdir="/"
-
-[ -r /etc/default/$name ] && . /etc/default/$name
-[ -r /etc/sysconfig/$name ] && . /etc/sysconfig/$name
-
-trace() {
- logger -t "/etc/init.d/prometheus" "$@"
-}
-
-emit() {
- trace "$@"
- echo "$@"
-}
-
-start() {
-
-
- # Setup any environmental stuff beforehand
-
-
- # Run the program!
-
- chroot --userspec "$user":"$group" "$chroot" sh -c "
-
- cd \"$chdir\"
- exec \"$program\" $args
- " >> <%= "#{node['prometheus']['log_dir']}/prometheus.log" %> 2>&1 &
-
- # Generate the pidfile from here. If we instead made the forked process
- # generate it there will be a race condition between the pidfile writing
- # and a process possibly asking for status.
- echo $! > $pidfile
-
- emit "$name started"
- return 0
-}
-
-stop() {
- # Try a few times to kill TERM the program
- if status ; then
- pid=$(cat "$pidfile")
- trace "Killing $name (pid $pid) with SIGTERM"
- kill -TERM $pid
- # Wait for it to exit.
- for i in 1 2 3 4 5 ; do
- trace "Waiting $name (pid $pid) to die..."
- status || break
- sleep 1
- done
- if status ; then
- emit "$name stop failed; still running."
- else
- emit "$name stopped."
- fi
- fi
-}
-
-status() {
- if [ -f "$pidfile" ] ; then
- pid=$(cat "$pidfile")
- if ps -p $pid > /dev/null 2> /dev/null ; then
- # process by this pid is running.
- # It may not be our pid, but that's what you get with just pidfiles.
- # TODO(sissel): Check if this process seems to be the same as the one we
- # expect. It'd be nice to use flock here, but flock uses fork, not exec,
- # so it makes it quite awkward to use in this case.
- return 0
- else
- return 2 # program is dead but pid file exists
- fi
- else
- return 3 # program is not running
- fi
-}
-
-force_stop() {
- if status ; then
- stop
- status && kill -KILL $(cat "$pidfile")
- fi
-}
-
-
-case "$1" in
- force-start|start|stop|force-stop|restart)
- trace "Attempting '$1' on prometheus"
- ;;
-esac
-
-case "$1" in
- force-start)
- PRESTART=no
- exec "$0" start
- ;;
- start)
- status
- code=$?
- if [ $code -eq 0 ]; then
- emit "$name is already running"
- exit $code
- else
- start
- exit $?
- fi
- ;;
- stop) stop ;;
- force-stop) force_stop ;;
- status)
- status
- code=$?
- if [ $code -eq 0 ] ; then
- emit "$name is running"
- else
- emit "$name is not running"
- fi
- exit $code
- ;;
- restart)
-
- stop && start
- ;;
- *)
- echo "Usage: $SCRIPTNAME {start|stop|force-start|force-stop|status|restart}" >&2
- exit 3
- ;;
-esac
-
-exit $?
diff --git a/lc-gdn-chef/cookbooks/prometheus/test/integration/alertmanager/serverspec/default_spec.rb b/lc-gdn-chef/cookbooks/prometheus/test/integration/alertmanager/serverspec/default_spec.rb
deleted file mode 100644
index ceb8a0fba8135e3d38fa6681178f4425d0f4bb95..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/test/integration/alertmanager/serverspec/default_spec.rb
+++ /dev/null
@@ -1,17 +0,0 @@
-require_relative '../../../kitchen/data/spec_helper'
-
-describe 'alertmanager service' do
- describe service('alertmanager') do
- it { should be_running }
- end
-
- describe port(9093) do
- it { should be_listening.with('tcp') }
- end
-end
-
-describe 'alertmanager should be exposing metrics' do
- describe command("curl 'http://localhost:9093/#/alerts/'") do
- its(:stdout) { should include('Alertmanager') }
- end
-end
diff --git a/lc-gdn-chef/cookbooks/prometheus/test/integration/bluepill-binary/serverspec/binary_spec.rb b/lc-gdn-chef/cookbooks/prometheus/test/integration/bluepill-binary/serverspec/binary_spec.rb
deleted file mode 100644
index 95956e9762929a1fc5c52b09b5381169c4ac6d72..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/test/integration/bluepill-binary/serverspec/binary_spec.rb
+++ /dev/null
@@ -1,17 +0,0 @@
-require_relative '../../../kitchen/data/spec_helper'
-
-describe 'prometheus service' do
- describe command('/opt/chef/embedded/bin/bluepill status prometheus') do
- its(:stdout) { should match(/up/) }
- end
-
- describe port(9090) do
- it { should be_listening }
- end
-
- describe 'prometheus should be exposing metrics' do
- describe command("curl 'http://localhost:9090/metrics'") do
- its(:stdout) { should match(/prometheus_notifications_queue_capacity 100/) }
- end
- end
-end
diff --git a/lc-gdn-chef/cookbooks/prometheus/test/integration/bluepill/serverspec/source_spec.rb b/lc-gdn-chef/cookbooks/prometheus/test/integration/bluepill/serverspec/source_spec.rb
deleted file mode 100644
index 6cdbf3a7eead0875a135151a36a6713d22aec9c8..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/test/integration/bluepill/serverspec/source_spec.rb
+++ /dev/null
@@ -1,17 +0,0 @@
-require_relative '../../../kitchen/data/spec_helper'
-
-describe 'prometheus service' do
- describe command('/opt/chef/embedded/bin/bluepill status prometheus') do
- its(:stdout) { should match(/up/) }
- end
-
- describe port(9090) do
- it { should be_listening }
- end
-end
-
-describe 'prometheus should be exposing metrics' do
- describe command("curl 'http://localhost:9090/metrics'") do
- its(:stdout) { should match(/prometheus_notifications_queue_capacity 100/) }
- end
-end
diff --git a/lc-gdn-chef/cookbooks/prometheus/test/integration/default-binary/serverspec/binary_spec.rb b/lc-gdn-chef/cookbooks/prometheus/test/integration/default-binary/serverspec/binary_spec.rb
deleted file mode 100644
index a31c61417162984e84da6950d3837fecf9986344..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/test/integration/default-binary/serverspec/binary_spec.rb
+++ /dev/null
@@ -1,17 +0,0 @@
-require_relative '../../../kitchen/data/spec_helper'
-
-describe 'prometheus service' do
- describe service('prometheus') do
- it { should be_running }
- end
-
- describe port(9090) do
- it { should be_listening }
- end
-
- describe 'prometheus should be exposing metrics' do
- describe command("curl 'http://localhost:9090/metrics'") do
- its(:stdout) { should match(/prometheus_notifications_queue_capacity 100/) }
- end
- end
-end
diff --git a/lc-gdn-chef/cookbooks/prometheus/test/integration/default/serverspec/source_spec.rb b/lc-gdn-chef/cookbooks/prometheus/test/integration/default/serverspec/source_spec.rb
deleted file mode 100644
index 4ba8302c69856f8c6a137fc16e040de6df91ce44..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/test/integration/default/serverspec/source_spec.rb
+++ /dev/null
@@ -1,17 +0,0 @@
-require_relative '../../../kitchen/data/spec_helper'
-
-describe 'prometheus service' do
- describe service('prometheus') do
- it { should be_running }
- end
-
- describe port(9090) do
- it { should be_listening }
- end
-end
-
-describe 'prometheus should be exposing metrics' do
- describe command("curl 'http://localhost:9090/metrics'") do
- its(:stdout) { should match(/prometheus_notifications_queue_capacity 100/) }
- end
-end
diff --git a/lc-gdn-chef/cookbooks/prometheus/test/integration/init-binary/serverspec/binary_spec.rb b/lc-gdn-chef/cookbooks/prometheus/test/integration/init-binary/serverspec/binary_spec.rb
deleted file mode 100644
index e0ff2c03edd04d00238691b3001095e0968799fe..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/test/integration/init-binary/serverspec/binary_spec.rb
+++ /dev/null
@@ -1,17 +0,0 @@
-require_relative '../../../kitchen/data/spec_helper'
-
-describe 'prometheus service' do
- describe command('/etc/init.d/prometheus status') do
- its(:stdout) { should match(/prometheus is running/) }
- end
-
- describe port(9090) do
- it { should be_listening }
- end
-
- describe 'prometheus should be exposing metrics' do
- describe command("curl 'http://localhost:9090/metrics'") do
- its(:stdout) { should match(/prometheus_notifications_queue_capacity 100/) }
- end
- end
-end
diff --git a/lc-gdn-chef/cookbooks/prometheus/test/integration/init/serverspec/source_spec.rb b/lc-gdn-chef/cookbooks/prometheus/test/integration/init/serverspec/source_spec.rb
deleted file mode 100644
index 05c422acebb5b2473e825fb18a1b75222721b809..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/test/integration/init/serverspec/source_spec.rb
+++ /dev/null
@@ -1,17 +0,0 @@
-require_relative '../../../kitchen/data/spec_helper'
-
-describe 'prometheus service' do
- describe command('/etc/init.d/prometheus status') do
- its(:stdout) { should match(/prometheus is running/) }
- end
-
- describe port(9090) do
- it { should be_listening }
- end
-end
-
-describe 'prometheus should be exposing metrics' do
- describe command("curl 'http://localhost:9090/metrics'") do
- its(:stdout) { should match(/prometheus_notifications_queue_capacity 100/) }
- end
-end
diff --git a/lc-gdn-chef/cookbooks/prometheus/test/shared/spec_helper.rb b/lc-gdn-chef/cookbooks/prometheus/test/shared/spec_helper.rb
deleted file mode 100644
index 071e57fc4c88f98fbce503cbdc7aa8914ef2479f..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/prometheus/test/shared/spec_helper.rb
+++ /dev/null
@@ -1,12 +0,0 @@
-require 'serverspec'
-
-# Required by serverspec
-set :backend, :exec
-
-Dir[File.expand_path('../support/**/*.rb', __FILE__)].each { |file| require_relative(file) }
-
-RSpec.configure do |config|
- config.before(:all) do
- config.path = '/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin'
- end
-end
diff --git a/lc-gdn-chef/cookbooks/selinux/CHANGELOG.md b/lc-gdn-chef/cookbooks/selinux/CHANGELOG.md
deleted file mode 100644
index 00aaa4f03aa0598fbc8aef0fdd6d73cec72eb569..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/selinux/CHANGELOG.md
+++ /dev/null
@@ -1,205 +0,0 @@
-# selinux Cookbook CHANGELOG
-
-This file is used to list changes made in each version of the selinux cookbook.
-
-## 6.0.4 - *2022-02-17*
-
-- Standardise files with files in sous-chefs/repo-management
-
-## 6.0.3 - *2022-02-08*
-
-- Remove delivery folder
-
-## 6.0.2 - *2022-01-01*
-
-- resolved cookstyle error: resources/install.rb:5:1 refactor: `Chef/Style/CopyrightCommentFormat`
-- resolved cookstyle error: resources/module.rb:5:1 refactor: `Chef/Style/CopyrightCommentFormat`
-- resolved cookstyle error: resources/state.rb:5:1 refactor: `Chef/Style/CopyrightCommentFormat`
-
-## 6.0.1 - *2021-11-03*
-
-- Correctly parse ports with multple contexts
-
-## 6.0.0 - *2021-09-02*
-
-- Import `selinux_policy` resources into this cookbook (`_fcontext`, `_permissive`, and `_port`)
- - `selinux_policy_module` not imported since it is a duplicate of `selinux_module`
-
-### Deprecations
-
-- `selinux_fcontext` action `addormodify` renamed to `manage`
-- `selinux_port` action `addormodify` renamed to `manage`
-
-## 5.1.1 - *2021-08-30*
-
-- Standardise files with files in sous-chefs/repo-management
-
-## 5.1.0 - *2021-08-21*
-
-- Fix `selinux_install` on Alma Linux / Oracle Linux
-
-## 5.0.0 - *2021-08-10*
-
-### Note: With version 5.0.0 the default recipe has been removed
-
-- Major refactoring
-- Restore support for Debian based distros
-- All resources now use unified_mode
-- Added selinux_boolean resource
-- Remove attributes and default recipe
- - Replaced with a set of bare recipes for the three selinux states
-- Add automatic restart function to `selinux_state` resource
-
-## 4.0.0 - *2021-07-21*
-
-- Sous Chefs adoption
-- Enable `unified_mode` for Chef 17 compatibility
-- Update test platforms
-
-## 3.1.1 (2020-09-29)
-
-- Move `default['selinux']['status']` attribute to `default['selinux']['state']` to avoid conflicts with Ohai in Chef
- Infra Client 16 - [@shoekstra](https://github.com/shoekstra)
-
-## 3.1.0 (2020-09-29)
-
-- Cookstyle Bot Auto Corrections with Cookstyle 6.16.8 - [@cookstyle](https://github.com/cookstyle)
-- Add a new `node['selinux']['install_mcstrans_package']` attribute to control installation of the mcstrans package.
- This defaults to true to maintain existing functionality. - [@kapilchouhan99](https://github.com/kapilchouhan99)
-
-## 3.0.2 (2020-08-25)
-
-- Fix failures in CI- [@shoekstra](https://github.com/shoekstra)
-- Specify platform to SoloRunner - [@shoekstra](https://github.com/shoekstra)
-- Remove unnecessary Foodcritic comments - [@tas50](https://github.com/tas50)
-- Notify :immediately not :immediate - [@tas50](https://github.com/tas50)
-- Add Github actions testing of style/unit - [@tas50](https://github.com/tas50)
-- [GH-67] - Do not try to modify frozen checksum - [@vzDevelopment](https://github.com/vzDevelopment)
-- Standardise files with files in chef-cookbooks/repo-management - [@xorimabot](https://github.com/xorimabot)
-
-## 3.0.1 (2019-11-14)
-
-- Remove the deprecated ChefSpec report - [@tas50](https://github.com/tas50)
-- Allow "-" and "_" for module names - [@ramereth](https://github.com/ramereth)
-- Update Fedora versions we test on - [@tas50](https://github.com/tas50)
-
-## 3.0.0 (2019-06-06)
-
-- Support for SELinux Modules, via new resource `selinux_module`, able to compile `.te` files, install and remove
- modules;
-- Improving test coverage for all resources
-- Remove support for Ubuntu/Debian
-- Require Chef 13+
-
-## 2.1.1 (2018-06-07)
-
-- Do not execute setenforce 1 always
-- Remove chefspec matchers that are autogenerated now
-- Chef 13 Fixes
-
-## 2.1.0 (2017-09-15)
-
-- Simplify Travis config and fix ChefDK 2.0 failures
-- Use bento slugs in Kitchen
-- Remove maintainer files
-- More cleanup of the maintainer files
-- Speed up install with multi-package install
-
-## 2.0.3 (2017-06-13)
-
-- Fix boolean check within default recipe
-
-## 2.0.2 (2017-06-05)
-
-- Permissive guard should grep for permissive not just disabled
-
-## 2.0.1 (2017-05-30)
-
-- Remove class_eval usage
-
-## 2.0.0 (2017-05-15)
-
-- Deprecate debian family support
-- Make default for rhel family use setenforce regardless of whether a temporary change or not. Eliminates the
- requirement for a required reboot to effect change in the running system.
-
-## 1.0.4 (2017-04-17)
-
-- Switch to local delivery for testing
-- Use the standard apache license string
-- Updates for early Chef 12 and Chef 13 compatibility
-- Update and add copyright blocks to the various files
-
-## 1.0.3 (2017-03-14)
-
-- Fix requirement in metadata to reflect need for Chef 12.7 as using action_class in state resource.
-
-## 1.0.2 (2017-03-01)
-
-- Remove setools* packages from install resource (utility to analyze and query policies, monitor and report audit logs,
- and manage file context). Future versions of this cookbook that might use this need to handle package install on
- Oracle Linux as not available in default repo.
-
-## 1.0.1 (2017-02-26)
-
-- Fix logic error in the permissive state change
-
-## 1.0.0 (2017-02-26)
-
-- **BREAKING CHANGE** `node['selinux']['state']` is now `node['selinux']['status']` to meet Chef 13 requirements.
-- Update to current cookbook engineering standards
-- Rewrite LWRP to 12.5 resources
-- Resolved cookstyle errors
-- Update package information for debian based on
-
- - selinux-activate looks like it's required to ACTUALLY activate selinux on non-RHEL systems. This seems like it
- could be destructive if unexpected.
-
-- Add property temporary to allow for switching between permissive and enabled
-
-- Add install resource
-
-## v0.9.0 (2015-02-22)
-
-- Initial Debian / Ubuntu support
-- Various bug fixes
-
-## v0.8.0 (2014-04-23)
-
-- [COOK-4528] - Fix selinux directory permissions
-- [COOK-4562] - Basic support for Ubuntu/Debian
-
-## v0.7.2 (2014-03-24)
-
-- handling minimal installs
-
-## v0.7.0 (2014-02-27)
-
-- [COOK-4218] Support setting SELinux boolean values
-
-## v0.6.2
-
-- Fixing bug introduced in 0.6.0
-- adding basic test-kitchen coverage
-
-## v0.6.0
-
-- [COOK-760] - selinux enforce/permit/disable based on attribute
-
-## v0.5.6
-
-- [COOK-2124] - enforcing recipe fails if selinux is disabled
-
-## v0.5.4
-
-- [COOK-1277] - disabled recipe fails on systems w/o selinux installed
-
-## v0.5.2
-
-- [COOK-789] - fix dangling commas causing syntax error on some rubies
-
-## v0.5.0
-
-- [COOK-678] - add the selinux cookbook to the repository
-- Use main selinux config file (/etc/selinux/config)
-- Use getenforce instead of selinuxenabled for enforcing and permissive
diff --git a/lc-gdn-chef/cookbooks/selinux/LICENSE b/lc-gdn-chef/cookbooks/selinux/LICENSE
deleted file mode 100644
index 8dada3edaf50dbc082c9a125058f25def75e625a..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/selinux/LICENSE
+++ /dev/null
@@ -1,201 +0,0 @@
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "{}"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright {yyyy} {name of copyright owner}
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
diff --git a/lc-gdn-chef/cookbooks/selinux/README.md b/lc-gdn-chef/cookbooks/selinux/README.md
deleted file mode 100644
index 35ff4bc290cfcc8ccf340db77be2989de96ce009..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/selinux/README.md
+++ /dev/null
@@ -1,85 +0,0 @@
-# SELinux Cookbook
-
-[Cookbook on Chef Supermarket](https://supermarket.chef.io/cookbooks/selinux)
-[CI workflow runs](https://github.com/sous-chefs/selinux/actions?query=workflow%3Aci)
-[Backers](#backers)
-[Sponsors](#sponsors)
-[License: Apache-2.0](https://opensource.org/licenses/Apache-2.0)
-
-## Description
-
-The SELinux (Security Enhanced Linux) cookbook provides recipes for manipulating SELinux policy enforcement state.
-
-SELinux can have one of three settings:
-
-`Enforcing`
-
-- Watches all system access checks, stops all 'Denied access'
-- Default mode on RHEL systems
-
-`Permissive`
-
-- Allows access but reports violations
-
-`Disabled`
-
-- Disables SELinux from the system but is only read at boot time. If you set this flag, you must reboot.
-
-Disable SELinux only if you plan to not use it. Use `Permissive` mode if you just need to debug your system.
-
-## Requirements
-
-- Chef 15.3 or higher
-
-## Platform
-
-- RHEL 7+
-- CentOS 7+
-- Fedora
-- Ubuntu
-- Debian
-
-## Resources
-
-The following resources are provided:
-
-- [selinux_boolean](documentation/selinux_boolean.md)
-- [selinux_fcontext](documentation/selinux_fcontext.md)
-- [selinux_install](documentation/selinux_install.md)
-- [selinux_module](documentation/selinux_module.md)
-- [selinux_permissive](documentation/selinux_permissive.md)
-- [selinux_port](documentation/selinux_port.md)
-- [selinux_state](documentation/selinux_state.md)
-
-## Maintainers
-
-This cookbook is maintained by the Sous Chefs. The Sous Chefs are a community of Chef cookbook maintainers working
-together to maintain important cookbooks. If you’d like to know more please
-visit [sous-chefs.org](https://sous-chefs.org/) or come chat with us on the Chef Community Slack
-in [#sous-chefs](https://chefcommunity.slack.com/messages/C2V7B88SF).
-
-## Contributors
-
-This project exists thanks to all the people who contribute.
-[Contributor overview](https://opencollective.com/sous-chefs/contributors.svg?width=890&button=false)
-
-### Backers
-
-Thank you to all our backers!
-
-
-
-### Sponsors
-
-Support this project by becoming a sponsor. Your logo will show up here with a link to your website.
-
-
-
-
-
-
-
-
-
-
-
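The README above explains the three SELinux enforcement states and lists the cookbook's resources; as a minimal, hypothetical wrapper recipe tying those pieces together (only resources defined elsewhere in this changeset are used, and the boolean name is illustrative):

```ruby
# Hypothetical wrapper recipe for the selinux cookbook being removed above.
selinux_install 'selinux'        # install the platform's SELinux packages

selinux_state 'enforcing' do     # converge the node to enforcing mode
  automatic_reboot true          # reboot automatically when leaving the disabled state
  action :enforcing
end

selinux_boolean 'httpd_can_network_connect' do
  value true                     # persisted with `setsebool -P` by default
end
```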
diff --git a/lc-gdn-chef/cookbooks/selinux/chefignore b/lc-gdn-chef/cookbooks/selinux/chefignore
deleted file mode 100644
index cc170ea79ed8bde58bbb77030c0c2ab70b959c21..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/selinux/chefignore
+++ /dev/null
@@ -1,115 +0,0 @@
-# Put files/directories that should be ignored in this file when uploading
-# to a Chef Infra Server or Supermarket.
-# Lines that start with '# ' are comments.
-
-# OS generated files #
-######################
-.DS_Store
-ehthumbs.db
-Icon?
-nohup.out
-Thumbs.db
-.envrc
-
-# EDITORS #
-###########
-.#*
-.project
-.settings
-*_flymake
-*_flymake.*
-*.bak
-*.sw[a-z]
-*.tmproj
-*~
-\#*
-REVISION
-TAGS*
-tmtags
-.vscode
-.editorconfig
-
-## COMPILED ##
-##############
-*.class
-*.com
-*.dll
-*.exe
-*.o
-*.pyc
-*.so
-*/rdoc/
-a.out
-mkmf.log
-
-# Testing #
-###########
-.circleci/*
-.codeclimate.yml
-.delivery/*
-.foodcritic
-.kitchen*
-.mdlrc
-.overcommit.yml
-.rspec
-.rubocop.yml
-.travis.yml
-.watchr
-.yamllint
-azure-pipelines.yml
-Dangerfile
-examples/*
-features/*
-Guardfile
-kitchen.yml*
-mlc_config.json
-Procfile
-Rakefile
-spec/*
-test/*
-
-# SCM #
-#######
-.git
-.gitattributes
-.gitconfig
-.github/*
-.gitignore
-.gitkeep
-.gitmodules
-.svn
-*/.bzr/*
-*/.git
-*/.hg/*
-*/.svn/*
-
-# Berkshelf #
-#############
-Berksfile
-Berksfile.lock
-cookbooks/*
-tmp
-
-# Bundler #
-###########
-vendor/*
-Gemfile
-Gemfile.lock
-
-# Policyfile #
-##############
-Policyfile.rb
-Policyfile.lock.json
-
-# Documentation #
-#############
-CODE_OF_CONDUCT*
-CONTRIBUTING*
-documentation/*
-TESTING*
-UPGRADING*
-
-# Vagrant #
-###########
-.vagrant
-Vagrantfile
diff --git a/lc-gdn-chef/cookbooks/selinux/libraries/boolean.rb b/lc-gdn-chef/cookbooks/selinux/libraries/boolean.rb
deleted file mode 100644
index c64d07c8caa21416548ba4422e599c89c5fe5ab6..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/selinux/libraries/boolean.rb
+++ /dev/null
@@ -1,17 +0,0 @@
-module SELinux
- module Cookbook
- module BooleanHelpers
- def selinux_bool(bool)
- if ['on', 'true', '1', true, 1].include?(bool)
- 'on'
- elsif ['off', 'false', '0', false, 0].include?(bool)
- 'off'
- else
- raise ArgumentError, "selinux_bool: Invalid selinux boolean value #{bool}"
- end
- end
-
- module_function :selinux_bool
- end
- end
-end
diff --git a/lc-gdn-chef/cookbooks/selinux/libraries/install.rb b/lc-gdn-chef/cookbooks/selinux/libraries/install.rb
deleted file mode 100644
index b99c64f43f063c9590d0b120a547872de758ea5c..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/selinux/libraries/install.rb
+++ /dev/null
@@ -1,22 +0,0 @@
-module SELinux
- module Cookbook
- module InstallHelpers
- def default_install_packages
- case node['platform_family']
- when 'rhel', 'fedora', 'amazon'
- %w(make policycoreutils selinux-policy selinux-policy-targeted selinux-policy-devel libselinux-utils setools-console)
- when 'debian'
- if node['platform'] == 'ubuntu'
- if node['platform_version'].to_f == 18.04
- %w(make policycoreutils selinux selinux-basics selinux-policy-default selinux-policy-dev auditd setools)
- else
- %w(make policycoreutils selinux-basics selinux-policy-default selinux-policy-dev auditd setools)
- end
- else
- %w(make policycoreutils selinux-basics selinux-policy-default selinux-policy-dev auditd setools)
- end
- end
- end
- end
- end
-end
diff --git a/lc-gdn-chef/cookbooks/selinux/libraries/state.rb b/lc-gdn-chef/cookbooks/selinux/libraries/state.rb
deleted file mode 100644
index 56ce2fbea569475310dce570c783551ed6a7550f..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/selinux/libraries/state.rb
+++ /dev/null
@@ -1,43 +0,0 @@
-module SELinux
- module Cookbook
- module StateHelpers
- def selinux_disabled?
- selinux_state.eql?(:disabled)
- end
-
- def selinux_enforcing?
- selinux_state.eql?(:enforcing)
- end
-
- def selinux_permissive?
- selinux_state.eql?(:permissive)
- end
-
- def state_change_reboot_required?
- (selinux_disabled? && %i(enforcing permissive).include?(action)) || ((selinux_enforcing? || selinux_permissive?) && action == :disabled)
- end
-
- def selinux_state
- state = shell_out!('getenforce').stdout.strip.downcase.to_sym
- raise "Got unknown SELinux state #{state}" unless %i(disabled enforcing permissive).include?(state)
-
- state
- end
-
- def selinux_activate_required?
- return false unless platform_family?('debian')
-
- !File.read('/etc/default/grub').match?('security=selinux')
- end
-
- def default_policy_platform
- case node['platform_family']
- when 'rhel', 'fedora', 'amazon'
- 'targeted'
- when 'debian'
- 'default'
- end
- end
- end
- end
-end
diff --git a/lc-gdn-chef/cookbooks/selinux/metadata.json b/lc-gdn-chef/cookbooks/selinux/metadata.json
deleted file mode 100644
index 80b8649297ae73c0c5e285161dc54cce2c8227f3..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/selinux/metadata.json
+++ /dev/null
@@ -1,38 +0,0 @@
-{
- "name": "selinux",
- "description": "Manages SELinux policy state and rules.",
- "long_description": "",
- "maintainer": "Sous Chefs",
- "maintainer_email": "help@sous-chefs.org",
- "license": "Apache-2.0",
- "platforms": {
- "redhat": ">= 0.0.0",
- "centos": ">= 0.0.0",
- "scientific": ">= 0.0.0",
- "oracle": ">= 0.0.0",
- "amazon": ">= 0.0.0",
- "fedora": ">= 0.0.0",
- "debian": ">= 0.0.0",
- "ubuntu": ">= 0.0.0"
- },
- "dependencies": {
- },
- "providing": {
- },
- "recipes": {
- },
- "version": "6.0.4",
- "source_url": "https://github.com/sous-chefs/selinux",
- "issues_url": "https://github.com/sous-chefs/selinux/issues",
- "privacy": false,
- "chef_versions": [
- [
- ">= 15.3"
- ]
- ],
- "ohai_versions": [
- ],
- "gems": [
- ],
- "eager_load_libraries": true
-}
diff --git a/lc-gdn-chef/cookbooks/selinux/metadata.rb b/lc-gdn-chef/cookbooks/selinux/metadata.rb
deleted file mode 100644
index ebc209a264dc13bf4ad0983d55503b4f53ac5a66..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/selinux/metadata.rb
+++ /dev/null
@@ -1,13 +0,0 @@
-name 'selinux'
-maintainer 'Sous Chefs'
-maintainer_email 'help@sous-chefs.org'
-license 'Apache-2.0'
-description 'Manages SELinux policy state and rules.'
-version '6.0.4'
-source_url 'https://github.com/sous-chefs/selinux'
-issues_url 'https://github.com/sous-chefs/selinux/issues'
-chef_version '>= 15.3'
-
-%w(redhat centos scientific oracle amazon fedora debian ubuntu).each do |os|
- supports os
-end
diff --git a/lc-gdn-chef/cookbooks/selinux/recipes/disabled.rb b/lc-gdn-chef/cookbooks/selinux/recipes/disabled.rb
deleted file mode 100644
index 35cedbb5428dcbf0e0f5712473dd2a262d0941bc..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/selinux/recipes/disabled.rb
+++ /dev/null
@@ -1,23 +0,0 @@
-#
-# Cookbook:: selinux
-# Recipe:: disabled
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-selinux_install 'selinux'
-
-selinux_state 'disabled' do
- automatic_reboot true
- action :disabled
-end
diff --git a/lc-gdn-chef/cookbooks/selinux/recipes/enforcing.rb b/lc-gdn-chef/cookbooks/selinux/recipes/enforcing.rb
deleted file mode 100644
index ae003d3b8634d680660334822fba34506d9396a8..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/selinux/recipes/enforcing.rb
+++ /dev/null
@@ -1,24 +0,0 @@
-
-#
-# Cookbook:: selinux
-# Recipe:: enforcing
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-selinux_install 'selinux'
-
-selinux_state 'enforcing' do
- automatic_reboot true
- action :enforcing
-end
diff --git a/lc-gdn-chef/cookbooks/selinux/recipes/permissive.rb b/lc-gdn-chef/cookbooks/selinux/recipes/permissive.rb
deleted file mode 100644
index 3edba4a388ecad29ddd1311fd9bc5540f3b7edcc..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/selinux/recipes/permissive.rb
+++ /dev/null
@@ -1,23 +0,0 @@
-#
-# Cookbook:: selinux
-# Recipe:: permissive
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-selinux_install 'selinux'
-
-selinux_state 'permissive' do
- automatic_reboot true
- action :permissive
-end
diff --git a/lc-gdn-chef/cookbooks/selinux/resources/boolean.rb b/lc-gdn-chef/cookbooks/selinux/resources/boolean.rb
deleted file mode 100644
index 0ed7444ae2bfebcf599cc9ed6896da70be439652..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/selinux/resources/boolean.rb
+++ /dev/null
@@ -1,56 +0,0 @@
-#
-# Cookbook:: selinux
-# Resource:: boolean
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-unified_mode true
-
-property :boolean, String,
- name_property: true,
- description: 'SELinux boolean to set'
-
-property :value, [Integer, String, true, false],
- required: true,
- equal_to: %w(on off),
- coerce: proc { |p| SELinux::Cookbook::BooleanHelpers.selinux_bool(p) },
- description: 'SELinux boolean value'
-
-property :persistent, [true, false],
- default: true,
- desired_state: false,
- description: 'Set to true for value setting to survive reboot'
-
-load_current_value do |new_resource|
- value shell_out!("getsebool #{new_resource.boolean}").stdout.split('-->').map(&:strip).last
-end
-
-action_class do
- include SELinux::Cookbook::StateHelpers
-end
-
-action :set do
- if selinux_disabled?
- Chef::Log.warn("Unable to set SELinux boolean #{new_resource.name} as SELinux is disabled")
- return
- end
-
- converge_if_changed do
- cmd = 'setsebool'
- cmd += ' -P' if new_resource.persistent
- cmd += " #{new_resource.boolean} #{new_resource.value}"
-
- shell_out!(cmd)
- end
-end
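Because the `value` property above is coerced through `SELinux::Cookbook::BooleanHelpers.selinux_bool`, the usual truthy and falsey spellings all converge to the same `setsebool` call; a short sketch (the boolean name is illustrative):

```ruby
# Each of these converges to `setsebool -P httpd_enable_homedirs on`;
# the boolean name is illustrative only.
selinux_boolean 'httpd_enable_homedirs' do
  value true
end

selinux_boolean 'homedirs via integer' do
  boolean 'httpd_enable_homedirs'
  value 1
end

# Dropping persistence omits the -P flag, so the change lasts until reboot.
selinux_boolean 'homedirs for this boot only' do
  boolean 'httpd_enable_homedirs'
  value 'on'
  persistent false
end
```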
diff --git a/lc-gdn-chef/cookbooks/selinux/resources/fcontext.rb b/lc-gdn-chef/cookbooks/selinux/resources/fcontext.rb
deleted file mode 100644
index ac2b4d60bc8e1cb72f8cf903846bed1e8a12a995..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/selinux/resources/fcontext.rb
+++ /dev/null
@@ -1,132 +0,0 @@
-#
-# Cookbook:: selinux
-# Resource:: fcontext
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-unified_mode true
-
-property :file_spec, String,
- name_property: true,
- description: 'Path to or regex matching the files or directories to label'
-
-property :secontext, String,
- required: %i(add modify manage),
- description: 'SELinux context to assign'
-
-property :file_type, String,
- default: 'a',
- equal_to: %w(a f d c b s l p),
- description: 'The type of the file being labeled'
-
-action_class do
- include SELinux::Cookbook::StateHelpers
-
- def current_file_context
- file_hash = {
- 'a' => 'all files',
- 'f' => 'regular file',
- 'd' => 'directory',
- 'c' => 'character device',
- 'b' => 'block device',
- 's' => 'socket',
- 'l' => 'symbolic link',
- 'p' => 'named pipe',
- }
-
- contexts = shell_out!('semanage fcontext -l').stdout.split("\n")
- # pull out file label from user:role:type:level context string
- contexts.grep(/^#{Regexp.escape(new_resource.file_spec)}\s+#{file_hash[new_resource.file_type]}/) do |c|
- c.match(/.+ (?<user>.+):(?<role>.+):(?<type>.+):(?<level>.+)$/)[:type]
- # match returns ['foo'] or [], shift converts that to 'foo' or nil
- end.shift
- end
-
- # Run restorecon to fix label
- # https://github.com/sous-chefs/selinux_policy/pull/72#issuecomment-338718721
- def relabel_files
- spec = new_resource.file_spec
- escaped = Regexp.escape spec
-
- # find common path between regex and string
- common = if spec == escaped
- spec
- else
- index = spec.size.times { |i| break i if spec[i] != escaped[i] }
- ::File.dirname spec[0...index]
- end
-
- # if path is not absolute, ignore it and search everything
- common = '/' if common[0] != '/'
-
- if ::File.exist? common
- shell_out!("find #{common.shellescape} -ignore_readdir_race -regextype posix-egrep -regex #{spec.shellescape} -prune -print0 | xargs -0 restorecon -iRv")
- end
- end
-end
-
-action :manage do
- run_action(:add)
- run_action(:modify)
-end
-
-action :addormodify do
- Chef::Log.warn('The :addormodify action for selinux_fcontext is deprecated and will be removed in a future release. Use the :manage action instead.')
- run_action(:manage)
-end
-
-# Create if doesn't exist, do not touch if fcontext is already registered
-action :add do
- if selinux_disabled?
- Chef::Log.warn("Unable to add SELinux fcontext #{new_resource.name} as SELinux is disabled")
- return
- end
-
- unless current_file_context
- converge_by "adding label #{new_resource.secontext} to #{new_resource.file_spec}" do
- shell_out!("semanage fcontext -a -f #{new_resource.file_type} -t #{new_resource.secontext} '#{new_resource.file_spec}'")
- relabel_files
- end
- end
-end
-
-# Only modify if fcontext exists & doesn't have the correct label already
-action :modify do
- if selinux_disabled?
- Chef::Log.warn("Unable to modify SELinux fcontext #{new_resource.name} as SELinux is disabled")
- return
- end
-
- if current_file_context && current_file_context != new_resource.secontext
- converge_by "modifying label #{new_resource.secontext} to #{new_resource.file_spec}" do
- shell_out!("semanage fcontext -m -f #{new_resource.file_type} -t #{new_resource.secontext} '#{new_resource.file_spec}'")
- relabel_files
- end
- end
-end
-
-# Delete if exists
-action :delete do
- if selinux_disabled?
- Chef::Log.warn("Unable to delete SELinux fcontext #{new_resource.name} as SELinux is disabled")
- return
- end
-
- if current_file_context
- converge_by "deleting label for #{new_resource.file_spec}" do
- shell_out!("semanage fcontext -d -f #{new_resource.file_type} '#{new_resource.file_spec}'")
- relabel_files
- end
- end
-end
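A sketch of driving the fcontext resource above: the `:manage` action adds the mapping when absent, rewrites it when the recorded context differs, and then relabels matching files with `restorecon` (the path and context here are invented):

```ruby
# Hypothetical: label a custom web root so httpd may serve it.
selinux_fcontext '/srv/www(/.*)?' do
  secontext 'httpd_sys_content_t'
  action :manage   # :add if missing, :modify if the stored context differs
end
```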
diff --git a/lc-gdn-chef/cookbooks/selinux/resources/install.rb b/lc-gdn-chef/cookbooks/selinux/resources/install.rb
deleted file mode 100644
index 9948b5cdb8763e987523859df739d738016578af..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/selinux/resources/install.rb
+++ /dev/null
@@ -1,54 +0,0 @@
-#
-# Cookbook:: selinux
-# Resource:: install
-#
-# Copyright:: 2016-2022, Chef Software, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-unified_mode true
-
-include SELinux::Cookbook::InstallHelpers
-
-property :packages, [String, Array],
- default: lazy { default_install_packages },
- description: 'SELinux packages for system'
-
-action_class do
- def do_package_action(action)
- # friendly message for unsupported platforms
- raise "The platform #{node['platform']} is not currently supported by the `selinux_install` resource. Please file an issue at https://github.com/sous-chefs/selinux/issues/new with details on the platform this cookbook is running on." if new_resource.packages.nil?
-
- package 'selinux' do
- package_name new_resource.packages
- action action
- end
- end
-end
-
-action :install do
- do_package_action(action)
-
- directory '/etc/selinux' do
- owner 'root'
- group 'root'
- mode '0755'
- action :create
- end
-end
-
-%i(upgrade remove).each do |a|
- action a do
- do_package_action(a)
- end
-end
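The install resource above computes its package list per platform via `default_install_packages`, but the `packages` property can be overridden; a hypothetical override (package names chosen only for illustration):

```ruby
# Hypothetical: trim the default package set to the minimum needed for policy work.
selinux_install 'selinux' do
  packages %w(policycoreutils selinux-policy-targeted libselinux-utils)
  action :install
end
```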
diff --git a/lc-gdn-chef/cookbooks/selinux/resources/module.rb b/lc-gdn-chef/cookbooks/selinux/resources/module.rb
deleted file mode 100644
index e9b78266d68303c06f3c0e53b034a3800675494e..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/selinux/resources/module.rb
+++ /dev/null
@@ -1,125 +0,0 @@
-#
-# Cookbook:: selinux
-# Resource:: module
-#
-# Copyright:: 2016-2022, Chef Software, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-unified_mode true
-
-property :module_name, String,
- name_property: true,
- description: 'Override the module name'
-
-property :source, String,
- description: 'Module source file name'
-
-property :content, String,
- description: 'Module source as String'
-
-property :cookbook, String,
- default: lazy { cookbook_name },
- description: 'Cookbook to source from module source file from'
-
-property :base_dir, String,
- default: '/etc/selinux/local',
- description: 'Directory to create module source file in'
-
-action_class do
- def selinux_module_filepath(type)
- path = ::File.join(new_resource.base_dir, "#{new_resource.module_name}")
- path.concat(".#{type}") if type
- end
-
- def list_installed_modules
- shell_out!('semodule --list-modules').stdout.split("\n").map { |x| x.split(/\s/).first }
- end
-end
-
-action :create do
- directory new_resource.base_dir
-
- if property_is_set?(:content)
- file selinux_module_filepath('te') do
- content new_resource.content
-
- mode '0600'
- owner 'root'
- group 'root'
-
- action :create
-
- notifies :run, "execute[Compiling SELinux modules at '#{new_resource.base_dir}']", :immediately
- end
- else
- cookbook_file selinux_module_filepath('te') do
- cookbook new_resource.cookbook
- source new_resource.source
-
- mode '0600'
- owner 'root'
- group 'root'
-
- action :create
-
- notifies :run, "execute[Compiling SELinux modules at '#{new_resource.base_dir}']", :immediately
- end
- end
-
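-  # The file/cookbook_file resources above notify this compile step immediately;
-  # a successful compile in turn notifies the semodule install execute declared below.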
- execute "Compiling SELinux modules at '#{new_resource.base_dir}'" do
- cwd new_resource.base_dir
- command "make -C #{new_resource.base_dir} -f /usr/share/selinux/devel/Makefile"
- timeout 120
- user 'root'
-
- action :nothing
-
- notifies :run, "execute[Install SELinux module '#{selinux_module_filepath('pp')}']", :immediately
- end
-
- raise "Compilation must have failed, no 'pp' file found at: '#{selinux_module_filepath('pp')}'" unless ::File.exist?(selinux_module_filepath('pp'))
-
- execute "Install SELinux module '#{selinux_module_filepath('pp')}'" do
- command "semodule --install '#{selinux_module_filepath('pp')}'"
- action :nothing
- end
-end
-
-action :delete do
- %w(fc if pp te).each do |type|
- next unless ::File.exist?(selinux_module_filepath(type))
-
- file selinux_module_filepath(type) do
- action :delete
- end
- end
-end
-
-action :install do
- raise "Module must be compiled before it can be installed, no 'pp' file found at: '#{selinux_module_filepath('pp')}'" unless ::File.exist?(selinux_module_filepath('pp'))
-
- unless list_installed_modules.include? new_resource.module_name
- converge_by "Install SELinux module #{selinux_module_filepath('pp')}" do
- shell_out!("semodule --install '#{selinux_module_filepath('pp')}'")
- end
- end
-end
-
-action :remove do
- if list_installed_modules.include? new_resource.module_name
- converge_by "Remove SELinux module #{new_resource.module_name}" do
- shell_out!("semodule --remove '#{new_resource.module_name}'")
- end
- end
-end
diff --git a/lc-gdn-chef/cookbooks/selinux/resources/permissive.rb b/lc-gdn-chef/cookbooks/selinux/resources/permissive.rb
deleted file mode 100644
index 161b02c9f8af6cbd905e7c6e8f756d1253cb47dd..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/selinux/resources/permissive.rb
+++ /dev/null
@@ -1,46 +0,0 @@
-#
-# Cookbook:: selinux
-# Resource:: permissive
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-unified_mode true
-
-property :context, String,
- name_property: true,
- description: 'The SELinux context to permit'
-
-action_class do
- def current_permissives
- shell_out!('semanage permissive -ln').stdout.split("\n")
- end
-end
-
-# Create if doesn't exist, do not touch if permissive is already registered (even under different type)
-action :add do
- unless current_permissives.include? new_resource.context
- converge_by "adding permissive context #{new_resource.context}" do
- shell_out!("semanage permissive -a '#{new_resource.context}'")
- end
- end
-end
-
-# Delete if exists
-action :delete do
- if current_permissives.include? new_resource.context
- converge_by "deleting permissive context #{new_resource.context}" do
- shell_out!("semanage permissive -d '#{new_resource.context}'")
- end
- end
-end
diff --git a/lc-gdn-chef/cookbooks/selinux/resources/port.rb b/lc-gdn-chef/cookbooks/selinux/resources/port.rb
deleted file mode 100644
index b8393ca03936742fef74240f89dfa6730b84cc8b..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/selinux/resources/port.rb
+++ /dev/null
@@ -1,98 +0,0 @@
-#
-# Cookbook:: selinux
-# Resource:: port
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-unified_mode true
-
-property :port, [Integer, String],
- name_property: true,
- regex: /^\d+$/,
- description: 'Port to modify'
-
-property :protocol, String,
- equal_to: %w(tcp udp),
- required: %i(manage add modify),
- description: 'Protocol to modify'
-
-property :secontext, String,
- required: %i(manage add modify),
- description: 'SELinux context to assign to the port'
-
-action_class do
- include SELinux::Cookbook::StateHelpers
-
- def current_port_context
- # use awk to see if the given port is within a reported port range
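-    # (seinfo output lines look like "portcon tcp 8080 system_u:object_r:http_port_t:s0";
-    # splitting on ':', the awk filter skips reserved_port_t and port-range entries
-    # and prints only the SELinux type)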
- shell_out!(
- <<~CMD
- seinfo --portcon=#{new_resource.port} | grep 'portcon #{new_resource.protocol}' | \
- awk -F: '$(NF-1) !~ /reserved_port_t$/ && $(NF-3) !~ /[0-9]*-[0-9]*/ {print $(NF-1)}'
- CMD
- ).stdout.split
- end
-end
-
-action :manage do
- run_action(:add)
- run_action(:modify)
-end
-
-action :addormodify do
- Chef::Log.warn('The :addormodify action for selinux_port is deprecated and will be removed in a future release. Use the :manage action instead.')
- run_action(:manage)
-end
-
-# Create if doesn't exist, do not touch if port is already registered (even under different type)
-action :add do
- if selinux_disabled?
- Chef::Log.warn("Unable to add SELinux port #{new_resource.name} as SELinux is disabled")
- return
- end
-
- if current_port_context.empty?
- converge_by "Adding context #{new_resource.secontext} to port #{new_resource.port}/#{new_resource.protocol}" do
- shell_out!("semanage port -a -t '#{new_resource.secontext}' -p #{new_resource.protocol} #{new_resource.port}")
- end
- end
-end
-
-# Only modify port if it exists & doesn't have the correct context already
-action :modify do
- if selinux_disabled?
- Chef::Log.warn("Unable to modify SELinux port #{new_resource.name} as SELinux is disabled")
- return
- end
-
- if !current_port_context.empty? && !current_port_context.include?(new_resource.secontext)
- converge_by "Modifying context #{new_resource.secontext} to port #{new_resource.port}/#{new_resource.protocol}" do
- shell_out!("semanage port -m -t '#{new_resource.secontext}' -p #{new_resource.protocol} #{new_resource.port}")
- end
- end
-end
-
-# Delete if exists
-action :delete do
- if selinux_disabled?
- Chef::Log.warn("Unable to delete SELinux port #{new_resource.name} as SELinux is disabled")
- return
- end
-
- unless current_port_context.empty?
- converge_by "Deleting context from port #{new_resource.port}/#{new_resource.protocol}" do
- shell_out!("semanage port -d -p #{new_resource.protocol} #{new_resource.port}")
- end
- end
-end
diff --git a/lc-gdn-chef/cookbooks/selinux/resources/state.rb b/lc-gdn-chef/cookbooks/selinux/resources/state.rb
deleted file mode 100644
index e3ea9fe270026dca3a7821679410fc5bdc422c91..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/selinux/resources/state.rb
+++ /dev/null
@@ -1,114 +0,0 @@
-#
-# Cookbook:: selinux
-# Resource:: state
-#
-# Copyright:: 2016-2022, Chef Software, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-unified_mode true
-
-include SELinux::Cookbook::StateHelpers
-
-default_action :nothing
-
-property :config_file, String,
- default: '/etc/selinux/config'
-
-property :persistent, [true, false],
- default: true,
- description: 'Persist status update to the selinux configuration file'
-
-property :policy, String,
- default: lazy { default_policy_platform },
- equal_to: %w(default minimum mls src strict targeted),
- description: 'SELinux policy type'
-
-property :automatic_reboot, [true, false, Symbol],
- default: false,
- description: 'Perform an automatic node reboot if required for state change'
-
-deprecated_property_alias 'temporary', 'persistent', 'The temporary property was renamed persistent in the 4.0 release of this cookbook. Please update your cookbooks to use the new property name.'
-
-action_class do
- include SELinux::Cookbook::StateHelpers
-
- def render_selinux_template(action)
- Chef::Log.warn(
- 'It is advised to set the configuration first to permissive to relabel the filesystem prior to enforcing.'
- ) if selinux_disabled? && action == :enforcing
-
- unless new_resource.automatic_reboot
- Chef::Log.warn('Changes from disabled require a reboot.') if selinux_disabled? && %i(enforcing permissive).include?(action)
- Chef::Log.warn('Disabling selinux requires a reboot.') if (selinux_enforcing? || selinux_permissive?) && action == :disabled
- end
-
- template "#{action} selinux config" do
- path new_resource.config_file
- source 'selinux.erb'
- cookbook 'selinux'
- variables(
- selinux: action.to_s,
- selinuxtype: new_resource.policy
- )
- end
- end
-
- def node_selinux_restart
- unless new_resource.automatic_reboot
- Chef::Log.warn("SELinux state change to #{action} requires a manual reboot as SELinux is currently #{selinux_state} and automatic reboots are disabled.")
- return
- end
-
- outer_action = action
- reboot 'selinux_state_change' do
- delay_mins 1
- reason "SELinux state change to #{outer_action} from #{selinux_state}"
-
- action new_resource.automatic_reboot.is_a?(Symbol) ? new_resource.automatic_reboot : :reboot_now
- end
- end
-end
-
-action :enforcing do
- execute 'selinux-setenforce-enforcing' do
- command '/usr/sbin/setenforce 1'
- end unless selinux_disabled? || selinux_enforcing?
-
- execute 'debian-selinux-activate' do
- command '/usr/sbin/selinux-activate'
- end if selinux_activate_required?
-
- render_selinux_template(action) if new_resource.persistent
- node_selinux_restart if state_change_reboot_required?
-end
-
-action :permissive do
- execute 'selinux-setenforce-permissive' do
- command '/usr/sbin/setenforce 0'
- end unless selinux_disabled? || selinux_permissive?
-
- execute 'debian-selinux-activate' do
- command '/usr/sbin/selinux-activate'
- end if selinux_activate_required?
-
- render_selinux_template(action) if new_resource.persistent
- node_selinux_restart if state_change_reboot_required?
-end
-
-action :disabled do
- raise 'A non-persistent change to the disabled SELinux status is not possible.' unless new_resource.persistent
-
- render_selinux_template(action)
- node_selinux_restart if state_change_reboot_required?
-end
diff --git a/lc-gdn-chef/cookbooks/selinux/templates/debian/selinux.erb b/lc-gdn-chef/cookbooks/selinux/templates/debian/selinux.erb
deleted file mode 100644
index 7a403b69307be146ec1ec32ef2ae92a1f4271924..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/selinux/templates/debian/selinux.erb
+++ /dev/null
@@ -1,18 +0,0 @@
-# Generated by Chef for <%= node['fqdn'] %>
-# Do NOT modify this file by hand.
-#
-
-# This file controls the state of SELinux on the system.
-# SELINUX= can take one of these three values:
-# enforcing - SELinux security policy is enforced.
-# permissive - SELinux prints warnings instead of enforcing.
-# disabled - No SELinux policy is loaded.
-SELINUX=<%= @selinux %>
-# SELINUXTYPE= can take one of these three values:
-# default - equivalent to the old strict and targeted policies
-# mls - Multi-Level Security (for military and educational use)
-# src - Custom policy built from source
-SELINUXTYPE=<%= @selinuxtype %>
-
-# SETLOCALDEFS= Check local definition changes
-SETLOCALDEFS=0
diff --git a/lc-gdn-chef/cookbooks/selinux/templates/default/selinux.erb b/lc-gdn-chef/cookbooks/selinux/templates/default/selinux.erb
deleted file mode 100644
index e1c84d83eff4b2cc09e1ae68ba814677cc85572b..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/selinux/templates/default/selinux.erb
+++ /dev/null
@@ -1,15 +0,0 @@
-# Generated by Chef for <%= node['fqdn'] %>
-# Do NOT modify this file by hand.
-#
-
-# This file controls the state of SELinux on the system.
-# SELINUX= can take one of these three values:
-# enforcing - SELinux security policy is enforced.
-# permissive - SELinux prints warnings instead of enforcing.
-# disabled - No SELinux policy is loaded.
-SELINUX=<%= @selinux %>
-# SELINUXTYPE= can take one of these three values:
-# targeted - Targeted processes are protected,
-# minimum - Modification of targeted policy. Only selected processes are protected.
-# mls - Multi Level Security protection.
-SELINUXTYPE=<%= @selinuxtype %>
diff --git a/lc-gdn-chef/cookbooks/wn_opentsdb/LICENSE b/lc-gdn-chef/cookbooks/wn_opentsdb/LICENSE
deleted file mode 100644
index d645695673349e3947e8e5ae42332d0ac3164cd7..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/wn_opentsdb/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright [yyyy] [name of copyright owner]
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
diff --git a/lc-gdn-chef/cookbooks/wn_opentsdb/README.md b/lc-gdn-chef/cookbooks/wn_opentsdb/README.md
deleted file mode 100644
index 648e59ebd4d094422cf06e46d82be9f2e4e78b10..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/wn_opentsdb/README.md
+++ /dev/null
@@ -1,94 +0,0 @@
-wn_opentsdb Cookbook
-====================
-Installs and configures the OpenTSDB time series database
-
-Any included scripts probably came from (https://github.com/OpenTSDB/opentsdb)
-
-Requirements
-------------
-- This requires a working instance of Apache HBase, running either in standalone
-or distributed mode.
-
-Attributes
-----------
-* node['wn_opentsdb']['conf']['tsd.core.auto_create_metrics']
-* node['wn_opentsdb']['conf']['tsd.http.cachedir']
-* node['wn_opentsdb']['conf']['tsd.http.staticroot']
-* node['wn_opentsdb']['conf']['tsd.network.port']
-* node['wn_opentsdb']['conf']['tsd.storage.hbase.zk_quorum']
-
-Usage
------
-Include `wn_opentsdb` into your node's role or recipe. This will create an
-OpenTSDB service listening on TCP port 4242.
-
-By default this cookbook assumes you're running OpenTSDB on the same host
-as your HBase master server (e.g. for evaluation). If using a remote HBase
-server, you'll need to set the `tsd.storage.hbase.zk_quorum` attribute as
-mentioned below.
-
-`node['wn_opentsdb']['conf']` is a hash of key=>value attributes for
-configuring the OpenTSDB daemon (i.e. written to `opentsdb.conf`). Three
-attributes are required for operation:
-
-`node['wn_opentsdb']['conf']['tsd.http.cachedir']` - Path where TSD will
-write cached files for the web UI. Defaults to `/dev/shm/tsd-cache`.
-
-`node['wn_opentsdb']['conf']['tsd.http.staticroot']` - Path to assets for
-TSD's web UI. Defaults to `/usr/share/opentsdb/static`.
-
-`node['wn_opentsdb']['conf']['tsd.network.port']` - Port on which TSD
-will listen for TCP and HTTP requests. Defaults to `4242`.
-
-Additional configuration options for OpenTSDB may be added to the 'conf' hash,
-using the property name as specified in the OpenTSDB configuration guide. For
-example:
-
-`node['wn_opentsdb']['conf']['tsd.core.auto_create_metrics']`:
-Automatically create new metric names in the database. `true` will accept
-all metric names sent from tcollector. `false` means new metrics must be
-created by the OpenTSDB administrator before the metrics will be accepted.
-
-`node['wn_opentsdb']['conf']['tsd.storage.hbase.zk_quorum']`: The
-network address for Zookeeper so your HBase instances can be found. By default
-the cookbook assumes you're running OpenTSDB on the same host as your HBase
-master (e.g. standalone mode) and sets this to `127.0.0.1`.
-
-If you're using a remote HBase server setup, set this attribute to the IP
-address where ZooKeeper is located. If specifying an IPv6 address, you need to
-surround the address in brackets and include the port number,
-e.g. `[1:2:3:4:5:6:7:8]:2181`.
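-
-For example (values are illustrative only), a wrapper cookbook's attributes
-file could point TSD at a remote quorum:
-
-```
-default['wn_opentsdb']['conf']['tsd.storage.hbase.zk_quorum'] = 'zk1.example.com:2181'
-```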
-
-`node['wn_opentsdb']['sysconfig']`: A hash of key=>value attributes for
-daemon defaults (i.e. `/etc/sysconfig/opentsdb`). By default this cookbook does
-not set any defaults here, instead using their OpenTSDB configuration
-file equivalents.
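-
-For example (key and value are illustrative only), a heap size can be passed
-through to the init script:
-
-```
-default['wn_opentsdb']['sysconfig']['jvmxmx'] = '-Xmx4096m'
-```
-
-The `tsd.sysconfig.erb` template upcases the key, so this is rendered as
-`JVMXMX=-Xmx4096m` and picked up when the init script builds `JVMARGS`.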
-
-### Runbook
-
-* Web interface: http://hostname:4242/
-
-* TCP/HTTP endpoint for tcollectors: hostname, port 4242
-
-* Logs for OpenTSDB are located in `/var/log/opentsdb`.
-
-* On successful startup of OpenTSDB, you should see this in the log:
-`INFO [main] TSDMain: Ready to serve on /0.0.0.0:4242`
-
-* To check health, connect to port 4242 and issue `version` or `stats`:
-```
-$ echo version | nc localhost 4242
-net.opentsdb.tools 2.2.0 built at revision (MODIFIED)
-Built on 2016/02/14 13:22:24 +0000 by root@centos.localhost:/root/rpmbuild/BUILD/opentsdb-2.2.0
-```
-
-* Error `org.hbase.async.TableNotFoundException: "tsdb"`: This means the tsdb
-tables were not found in HBase. Run the `tools/create_table.sh` script on the
-HBase server to create them. Restart OpenTSDB after creating the tables.
-
-e.g.
-```env COMPRESSION=NONE HBASE_HOME=/opt/hbase tools/create_table.sh```
-
-* Error `[OpenTSDB I/O Worker #5] HBaseClient: Need to find the .META. region`:
-the HBase server went away or the connection to it was lost.
-
diff --git a/lc-gdn-chef/cookbooks/wn_opentsdb/attributes/default.rb b/lc-gdn-chef/cookbooks/wn_opentsdb/attributes/default.rb
deleted file mode 100644
index d2378533586061434d778ac8c026ebb26d598558..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/wn_opentsdb/attributes/default.rb
+++ /dev/null
@@ -1,12 +0,0 @@
-# vim: syntax=ruby:expandtab:shiftwidth=2:softtabstop=2:tabstop=2
-
-default['wn_opentsdb'] = {
- 'sysconfig' => {},
- 'conf' => {
- 'tsd.core.auto_create_metrics' => true,
- 'tsd.http.cachedir' => '/dev/shm/tsd-cache',
- 'tsd.http.staticroot' => '/usr/share/opentsdb/static',
- 'tsd.network.port' => '4242',
- 'tsd.storage.hbase.zk_quorum' => '127.0.0.1',
- },
-}
diff --git a/lc-gdn-chef/cookbooks/wn_opentsdb/files/default/logback.xml b/lc-gdn-chef/cookbooks/wn_opentsdb/files/default/logback.xml
deleted file mode 100644
index 7f0fb57694a812801cdb93270f4d182e1dad76f9..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/wn_opentsdb/files/default/logback.xml
+++ /dev/null
@@ -1,45 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<configuration>
-
-  <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
-    <encoder>
-      <pattern>%d{ISO8601} %-5level [%thread] %logger{0}: %msg%n</pattern>
-    </encoder>
-  </appender>
-
-  <appender name="CYCLIC" class="ch.qos.logback.core.read.CyclicBufferAppender">
-    <MaxSize>1024</MaxSize>
-  </appender>
-
-  <appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
-    <file>/var/log/opentsdb/opentsdb.log</file>
-    <append>true</append>
-
-    <rollingPolicy class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">
-      <fileNamePattern>/var/log/opentsdb/opentsdb.log.%i</fileNamePattern>
-      <minIndex>1</minIndex>
-      <maxIndex>3</maxIndex>
-    </rollingPolicy>
-
-    <triggeringPolicy class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
-      <maxFileSize>128MB</maxFileSize>
-    </triggeringPolicy>
-
-    <encoder>
-      <pattern>%d{HH:mm:ss.SSS} %-5level [%logger{0}.%M] - %msg%n</pattern>
-    </encoder>
-  </appender>
-
-  <root level="INFO">
-    <appender-ref ref="STDOUT"/>
-    <appender-ref ref="CYCLIC"/>
-    <appender-ref ref="FILE"/>
-  </root>
-</configuration>
diff --git a/lc-gdn-chef/cookbooks/wn_opentsdb/files/default/opentsdb.init b/lc-gdn-chef/cookbooks/wn_opentsdb/files/default/opentsdb.init
deleted file mode 100755
index 5f4ee1d8d58643d5b24d094be75bd6787b404987..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/wn_opentsdb/files/default/opentsdb.init
+++ /dev/null
@@ -1,175 +0,0 @@
-#!/bin/sh
-#
-# opentsdb This shell script takes care of starting and stopping OpenTSDB.
-#
-# chkconfig: 35 99 01
-# description: OpenTSDB is a distributed, scalable Time Series Database (TSDB) \
-# written on top of HBase. OpenTSDB was written to address a common need: store, \
-# index and serve metrics collected from computer systems (network gear, operating \
-# systems, applications) at a large scale, and make this data easily accessible \
-# and graphable.
-#
-# Based in part on a shell script by Jacek Masiulaniec at
-# https://github.com/masiulaniec/opentsdb-rhel/blob/master/src/tsdb-server.init.
-
-### BEGIN INIT INFO
-# Provides: opentsdb
-# Required-Start: $network $local_fs $remote_fs
-# Required-Stop: $network $local_fs $remote_fs
-# Short-Description: start and stop opentsdb
-# Description: OpenTSDB is a distributed, scalable Time Series Database (TSDB)
-# written on top of HBase. OpenTSDB was written to address a
-# common need: store, index and serve metrics collected from
-# computer systems (network gear, operating systems, applications)
-# at a large scale, and make this data easily accessible and
-# graphable.
-### END INIT INFO
-
-# Source init functions
-. /etc/init.d/functions
-
-# Maximum number of open files
-MAX_OPEN_FILES=65535
-
-# Default program options
-NAME=opentsdb
-PROG=/usr/bin/tsdb
-HOSTNAME=$(hostname --fqdn)
-USER=root
-
-# Default directories
-LOG_DIR=/var/log/opentsdb
-LOCK_DIR=/var/lock/subsys
-PID_DIR=/var/run/opentsdb
-
-# Global and Local sysconfig files
-[ -e /etc/sysconfig/opentsdb ] && . /etc/sysconfig/opentsdb
-[ "`basename $0`" != "$NAME" ] && [ -e /etc/sysconfig/`basename $0` ] && . /etc/sysconfig/`basename $0`
-
-# Set file names
-LOG_FILE=$LOG_DIR/$NAME-$HOSTNAME-
-LOCK_FILE=$LOCK_DIR/$NAME
-PID_FILE=$PID_DIR/$NAME.pid
-CONFIG=/etc/opentsdb/${NAME}.conf
-
-# Create dirs if they don't exist
-[ -e $LOG_DIR ] || (mkdir -p $LOG_DIR && chown $USER: $LOG_DIR)
-[ -e $PID_DIR ] || mkdir -p $PID_DIR
-
-PROG_OPTS="tsd --config=${CONFIG}"
-
-start() {
- echo -n "Starting ${NAME}: "
- curid="`id -u -n`"
- if [ "$curid" != root ] && [ "$curid" != "$USER" ] ; then
- echo "Must be run as root or $USER, but was run as $curid"
- return 1
- fi
- # Sets the maximum number of open file descriptors allowed.
- ulimit -n $MAX_OPEN_FILES
- curulimit="`ulimit -n`"
- if [ "$curulimit" -lt $MAX_OPEN_FILES ] ; then
- echo "'ulimit -n' must be greater than or equal to $MAX_OPEN_FILES, is $curulimit"
- return 1
- fi
-
- # Set a default value for JVMARGS
- : ${JVMXMX:=-Xmx6000m}
- : ${JVMARGS:=-DLOG_FILE_PREFIX=${LOG_FILE} -enableassertions -enablesystemassertions $JVMXMX -XX:OnOutOfMemoryError=/usr/share/opentsdb/tools/opentsdb_restart.py}
- export JVMARGS
-
- if [ "`id -u -n`" == root ] ; then
- # Changes the owner of the log directory to allow non-root OpenTSDB
- # daemons to create and rename log files.
- chown $USER: $LOG_DIR > /dev/null 2>&1
- chown $USER: ${LOG_FILE}*opentsdb.log > /dev/null 2>&1
- chown $USER: ${LOG_FILE}opentsdb.out > /dev/null 2>&1
- chown $USER: ${LOG_FILE}opentsdb.err > /dev/null 2>&1
-
- # Changes the owner of the lock, and the pid files to allow
- # non-root OpenTSDB daemons to run /usr/share/opentsdb/bin/opentsdb_restart.py.
- touch $LOCK_FILE && chown $USER: $LOCK_FILE
- touch $PID_FILE && chown $USER: $PID_FILE
- daemon --user $USER --pidfile $PID_FILE "$PROG $PROG_OPTS 1>> ${LOG_FILE}opentsdb.out 2>> ${LOG_FILE}opentsdb.err &"
- else
- # Don't have to change user.
- daemon --pidfile $PID_FILE "$PROG $PROG_OPTS 1>> ${LOG_FILE}opentsdb.out 2>> ${LOG_FILE}opentsdb.err &"
- fi
- retval=$?
- sleep 2
- echo
- [ $retval -eq 0 ] && (findproc > $PID_FILE && touch $LOCK_FILE)
- return $retval
-}
-
-stop() {
- echo -n "Stopping ${NAME}: "
- killproc -p $PID_FILE $NAME
- retval=$?
- echo
- # Non-root users don't have enough permission to remove pid and lock files.
- # So, the opentsdb_restart.py cannot get rid of the files, and the command
- # "service opentsdb status" will complain about the existing pid file.
- # Makes the pid file empty.
- echo > $PID_FILE
- [ $retval -eq 0 ] && (rm -f $PID_FILE && rm -f $LOCK_FILE)
- return $retval
-}
-
-restart() {
- stop
- start
-}
-
-reload() {
- restart
-}
-
-force_reload() {
- restart
-}
-
-rh_status() {
- # run checks to determine if the service is running or use generic status
- status -p $PID_FILE -l $LOCK_FILE $NAME
-}
-
-rh_status_q() {
- rh_status >/dev/null 2>&1
-}
-
-findproc() {
- pgrep -f "java .* net.opentsdb.tools.TSDMain .*${NAME}"
-}
-
-case "$1" in
- start)
- rh_status_q && exit 0
- $1
- ;;
- stop)
- rh_status_q || exit 0
- $1
- ;;
- restart)
- $1
- ;;
- reload)
- rh_status_q || exit 7
- $1
- ;;
- force-reload)
- force_reload
- ;;
- status)
- rh_status
- ;;
- condrestart|try-restart)
- rh_status_q || exit 0
- restart
- ;;
- *)
- echo $"Usage: $0 {start|stop|status|restart|condrestart|try-restart|reload|force-reload}"
- exit 2
-esac
-exit $?
diff --git a/lc-gdn-chef/cookbooks/wn_opentsdb/files/default/tsd-server-stats.sh b/lc-gdn-chef/cookbooks/wn_opentsdb/files/default/tsd-server-stats.sh
deleted file mode 100644
index abae3a8f7151ba0266771d24f0c983ff358f89ab..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/wn_opentsdb/files/default/tsd-server-stats.sh
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/bin/bash
-#
-# This runs on the same host as OpenTSDB server, collects TSD server
-# performance metrics, and feeds them back into the TSD.
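-#
-# The first nc asks the local TSD for its internal "stats" every $INTERVAL
-# seconds; sed prefixes each returned line with "put " so the second nc can
-# write those metrics straight back into the TSD as data points.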
-#
-INTERVAL=15
-while :; do
- echo stats || exit
- sleep $INTERVAL
-done | nc -w 30 localhost6 4242 \
- | sed 's/^/put /' \
- | nc -w 30 localhost6 4242
diff --git a/lc-gdn-chef/cookbooks/wn_opentsdb/files/default/tsd.init b/lc-gdn-chef/cookbooks/wn_opentsdb/files/default/tsd.init
deleted file mode 100644
index 939f772b0019a4d1f1c52b2b286571b4d8c5394e..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/wn_opentsdb/files/default/tsd.init
+++ /dev/null
@@ -1,80 +0,0 @@
-#!/bin/sh
-#
-# chkconfig: 2345 60 40
-# description: TSDB server
-#
-# From https://github.com/masiulaniec/opentsdb-rhel/blob/master/src/tsdb-server.init
-
-. /etc/rc.d/init.d/functions
-. /etc/sysconfig/tsd
-
-RETVAL=0
-
-start() {
- status >/dev/null && return 0
- echo "Starting tsdb server..."
-
- # Respect sysconfig jvm args.
- export JVMARGS="-enableassertions -enablesystemassertions $JVMARGS"
-
- /usr/bin/tsdb tsd \
- --cachedir=$CACHE \
- --port=$PORT \
- --zkquorum=$QUORUM \
- --staticroot=/usr/share/opentsdb/static \
- --auto-metric \
- 1>/var/log/tsd/tsdb-server.out \
- 2>/var/log/tsd/tsdb-server.err \
- &
-}
-
-stop() {
- status >/dev/null || return 0
- echo "Stopping tsdb-server..."
- f=$(mktemp /tmp/pidfile.XXXXXXX)
- findproc >$f
- killproc -p $f >/dev/null
- rm -f $f
-}
-
-findproc() {
- pgrep -f '^java .*/usr/share/opentsdb/tsdb-2.0.0.jar.* net.opentsdb.tools.TSDMain'
-}
-
-status() {
- pid=$(findproc)
- if [ -n "$pid" ]
- then
- echo "tsd is running... (pid $pid)"
- return 0
- else
- echo "tsd is stopped."
- return 1
- fi
-}
-
-case "$1" in
- start)
- start
- ;;
-
- stop)
- stop
- ;;
-
- restart)
- stop
- start
- ;;
-
- status)
- status
- RETVAL=$?
- ;;
-
- *)
- echo >&2 "Usage: $0 {start|stop|restart|status}"
- ;;
-esac
-
-exit $RETVAL
diff --git a/lc-gdn-chef/cookbooks/wn_opentsdb/metadata.rb b/lc-gdn-chef/cookbooks/wn_opentsdb/metadata.rb
deleted file mode 100644
index 2a62daebe8d813669d4b1571aab75de767e21d57..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/wn_opentsdb/metadata.rb
+++ /dev/null
@@ -1,14 +0,0 @@
-# vim: syntax=ruby:expandtab:shiftwidth=2:softtabstop=2:tabstop=2
-name 'wn_opentsdb'
-maintainer 'Bryan Wann'
-maintainer_email 'bwann-chef@wann.net'
-license 'Apache-2.0'
-description 'Installs/Configures OpenTSDB'
-source_url 'https://github.com/bwann/chef/cookbooks'
-long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
-# never EVER change this number, ever.
-version '0.1.0'
-depends 'wn_java'
-supports 'centos'
-supports 'debian'
-supports 'ubuntu'
diff --git a/lc-gdn-chef/cookbooks/wn_opentsdb/recipes/default.rb b/lc-gdn-chef/cookbooks/wn_opentsdb/recipes/default.rb
deleted file mode 100644
index 65506379ade0ad1150e535e160646ff198577da4..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/wn_opentsdb/recipes/default.rb
+++ /dev/null
@@ -1,112 +0,0 @@
-# vim: syntax=ruby:expandtab:shiftwidth=2:softtabstop=2:tabstop=2
-#
-# Cookbook Name:: wn_opentsdb
-# Recipe:: default
-#
-# Author:: Bryan Wann ()
-#
-# Copyright:: 2018, Bryan Wann
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-include_recipe 'wn_java'
-
-packages = %w{
- gnuplot
- opentsdb
-}
-
-package packages do
- action :upgrade
-end
-
-# PNG cache dir, in-memory
-directory '/dev/shm/tsd-cache' do
- owner 'root'
- group 'root'
- mode '0755'
- action :create
-end
-
-directory '/var/log/opentsdb' do
- owner 'root'
- group 'root'
- mode '0755'
- action :create
-end
-
-directory '/etc/opentsdb' do
- mode '0755'
- owner 'root'
- group 'root'
- action :create
-end
-
-template '/etc/opentsdb/opentsdb.conf' do
- source 'opentsdb.conf.erb'
- mode '0644'
- owner 'root'
- group 'root'
- notifies :restart, 'service[opentsdb]'
-end
-
-# XXX: /etc/opentsdb/ should have been symlinked to /usr/share/opentsdb/etc/
-# by the package, but the symlink appears broken in the 2.2.0 RPM. Shipping
-# logback.xml directly keeps logging happy.
-cookbook_file '/etc/opentsdb/logback.xml' do
- source 'logback.xml'
- mode '0644'
- owner 'root'
- group 'root'
-end
-
-template '/etc/sysconfig/opentsdb' do
- source 'tsd.sysconfig.erb'
- mode '0644'
- owner 'root'
- group 'root'
- notifies :restart, 'service[opentsdb]'
-end
-
-node.default['wn_cron']['jobs']['cleanup tsd cache'] = {
- 'time' => '* 0 * * *',
- 'command' =>
- 'find /dev/shm/tsd-cache -maxdepth 1 -type f -mtime +1 ' \
- '-exec /bin/rm -f {} \;',
-}
-
-# Systemd doesn't like a symlink
-# TODO: make this a proper systemd unit
-cookbook_file '/etc/init.d/opentsdb' do
- not_if { File.exist?('/etc/init.d/opentsdb') }
- source 'opentsdb.init'
- mode '0755'
- owner 'root'
- group 'root'
-end
-
-cookbook_file '/root/tsd-server-stats.sh' do
- source 'tsd-server-stats.sh'
- mode '0755'
- owner 'root'
- group 'root'
-end
-
-execute 'starting TSD stats gatherer' do
- command '/root/tsd-server-stats.sh &'
- not_if 'pgrep -f tsd-server-stats.sh'
-end
-
-service 'opentsdb' do
- action [:start, :enable]
-end
diff --git a/lc-gdn-chef/cookbooks/wn_opentsdb/templates/default/opentsdb.conf.erb b/lc-gdn-chef/cookbooks/wn_opentsdb/templates/default/opentsdb.conf.erb
deleted file mode 100644
index 88990a74d81d1cca9d6a33e13125f45695950f2d..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/wn_opentsdb/templates/default/opentsdb.conf.erb
+++ /dev/null
@@ -1,6 +0,0 @@
-# This file is managed by Chef. Do not make any changes here or they will
-# be overwritten! See the wn_opentsdb cookbook for more information.
-<% conf = node['wn_opentsdb']['conf'].to_hash -%>
-<% conf.sort.each do |k, v| -%>
-<%= k %> <%= v %>
-<% end %>
diff --git a/lc-gdn-chef/cookbooks/wn_opentsdb/templates/default/tsd.sysconfig.erb b/lc-gdn-chef/cookbooks/wn_opentsdb/templates/default/tsd.sysconfig.erb
deleted file mode 100644
index d17881989d9423d02b587b14f97ca2074fc62186..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/wn_opentsdb/templates/default/tsd.sysconfig.erb
+++ /dev/null
@@ -1,5 +0,0 @@
-# This file is managed by Chef. Do not make any changes here or they will
-# be overwritten! See the wn_opentsdb cookbook for more information.
-<% node['wn_opentsdb']['sysconfig'].to_hash.sort.each do |k, v| -%>
-<%= k.upcase %>=<%= v %>
-<% end %>
diff --git a/lc-gdn-chef/cookbooks/yum-epel/CHANGELOG.md b/lc-gdn-chef/cookbooks/yum-epel/CHANGELOG.md
deleted file mode 100644
index fdc7ef4b37462b92439e9cc3ab0d3af2b8ccf63d..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/yum-epel/CHANGELOG.md
+++ /dev/null
@@ -1,230 +0,0 @@
-# yum-epel Cookbook CHANGELOG
-
-This file is used to list changes made in each version of the yum-epel cookbook.
-
-## 4.4.1 - *2022-02-02*
-
-- Remove delivery and move to calling RSpec directly via a reusable workflow
-
-## 4.4.0 - *2022-01-27*
-
-- Allow the cookbook to install EPEL on Alma Linux
-- Remove testing for CentOS 8 (use Stream instead)
-
-## 4.3.0 - *2022-01-07*
-
-- Allow the cookbook to install EPEL on Rocky Linux
-
-## 4.2.3 - *2021-11-03*
-
-- Rename helper method to `epel_8_repos` to not conflict with yum-centos
-
-## 4.2.2 - *2021-11-02*
-
-- Update documentation for epel on CentOS Stream
-
-## 4.2.1 - *2021-11-02*
-
-- Add epel and epel-debuginfo repos by default for CentOS Streams
-
-## 4.2.0 - *2021-11-02*
-
-- Add support for CentOS Stream 8
-
-## 4.1.4 - *2021-08-30*
-
-- Standardise files with files in sous-chefs/repo-management
-
-## 4.1.3 - *2021-07-14*
-
-- Remove deprecated `failoverprorioty` setting
-
-## 4.1.2 - *2021-06-01*
-
-- Standardise files with files in sous-chefs/repo-management
-
-## 4.1.1 - *2021-01-24*
-
-- Fix support for Oracle Linux
-
-## 4.1.0 - *2021-01-14*
-
-- Sous Chefs Adoption
-
-## 4.0.1 (2021-01-04)
-
-- Return empty array on non-yum systems - [@ramereth](https://github.com/ramereth)
-
-## 4.0.0 (2020-12-15)
-
-- Cookstyle fixes - [@tas50](https://github.com/tas50)
-- Switch all http URLs to HTTPS URLs - [@damacus](https://github.com/damacus)
-- Switch gpgkey urls - [@knightorc](https://github.com/knightorc)
-- Require Chef 12.15+ - [@tas50](https://github.com/tas50)
-- Remove CentOS 6 / Amazon Linux 201X support/testing - [@ramereth](https://github.com/ramereth)
-- Improve InSpec test by using yum.repo resource - [@ramereth](https://github.com/ramereth)
-- Fix repo descriptions on Amazon Linux - [@ramereth](https://github.com/ramereth)
-- Test all supported repos in new "all" suite - [@ramereth](https://github.com/ramereth)
-- Ensure other epel repos are not enabled in default suite - [@ramereth](https://github.com/ramereth)
-- Add various modular and playground repos for EL8 - [@ramereth](https://github.com/ramereth)
-- Update README - [@ramereth](https://github.com/ramereth)
-- Cleanup metadata.rb formatting - [@ramereth](https://github.com/ramereth)
-
-## 3.3.0 (2018-10-09)
-
-- Fix cookbook to work on all releases of Amazon Linux 2
-- Test on Amazon Linux 2 in specs and in Travis
-
-## 3.2.0 (2018-07-24)
-
-- Support EPEL on ARM32.
-
-## 3.1.0 (2018-02-26)
-
-- Add support for Amazon Linux 2.0
-
-## 3.0.0 (2018-02-16)
-
-- Require Chef 12.14+ and remove the compat_resource dependency
-
-## 2.1.2 (2017-06-15)
-
-- Switch from Rake testing to Local Delivery
-- Update apache2 license string to be a SPDX compliant string
-- Change yum repo location of gpgkey to download.fedoraproject.org instead of dl.fedoraproject.org
-- Avoid chefspec deprecations and speed up specs
-
-## 2.1.1 (2017-01-05)
-
-- Revert how mirror list strings are generated to fix RHEL 7
-
-## 2.1.0 (2016-12-22)
-
-- Test in Travis using the current build of chef/chef docker image
-- Test on older Chef
-- allow the use of any valid property via attributes
-- fixing tests
-- output versions in the job that is being ran
-- cops
-
-## 2.0.0 (2016-11-26)
-
-- Clarify that we require Chef 12.1+ not 12.0+
-- Use compat_resource instead of the yum cookbook
-- Add integration testing with inspec
-
-## 1.0.2 (2016-10-21)
-
-- Remove upper bound on yum constraint
-
-## 1.0.1 (2016-09-11)
-
-- Fix epel-testing attributes
-
-## 1.0.0 (2016-09-06)
-
-- Add chef_version metadata
-- Testing updates
-- Remove support for Chef 11
-
-## v0.7.1 (2016-08-19)
-
-- Remove bats testing
-- Fix attribute settings
-- Cleanup travis file
-
-## v0.7.0 (2016-04-27)
-
-- Added support for IBM zlinux platform
-- Added back the Test Kitchen support for local vagrant testing
-- Added long_description to the metadata
-- Loosen the dependency on the yum cookbook
-
-## v0.6.5
-
-- updated to use `make_cache` option that yum cookbook allows for the yum resource to use.
-
-## v0.6.5 (2015-11-23)
-
-- Fix setting bool false properties
-
-## v0.6.4 (2015-10-27)
-
-- Updating default recipe for Chef 13 deprecation warnings. Not passing nil.
-
-## v0.6.3 (2015-09-22)
-
-- Added standard Chef gitignore and chefignore files
-- Added the standard chef rubocop config
-- Update contributing, maintainers, and testing docs
-- Update Chefspec config to 4.X format
-- Update distro versions in the Kitchen config
-- Add Travis CI and cookbook version badges in the readme
-- Expand the requirements section in the readme
-- Add additional distros to the metadata
-- Added source_url and issues_url metadata
-
-## v0.6.2 (2015-06-21)
-
-- Depending on yum ~> 3.2
-- Support for the password attribute wasn't added to the yum_repository LWRP until yum 3.2.0.
-
-## v0.6.1 (2015-06-21)
-
-- Switching to https for URL links
-- Using metalink URLs
-
-## v0.6.0 (2015-01-03)
-
-- Adding EL7 support
-
-## v0.5.3 (2014-10-28)
-
-- Revert Use HTTPS for GPG keys and mirror lists
-
-## v0.5.2 (2014-10-28)
-
-- Use HTTPS for GPG keys and mirror lists
-- Use local key on Amazon Linux
-
-## v0.5.0 (2014-09-02)
-
-- Add all attribute available to LWRP to allow for tuning.
-
-## v0.4.0 (2014-07-27)
-
-- [#9] Allow the list of repositories to reference to be configurable.
-
-## v0.3.6 (2014-04-09)
-
-- [COOK-4509] add RHEL7 support to yum-epel cookbook
-
-## v0.3.4 (2014-02-19)
-
-COOK-4353 - Fixing typo in readme
-
-## v0.3.2 (2014-02-13)
-
-Updating README to explain the 'managed' parameter
-
-## v0.3.0 (2014-02-12)
-
-[COOK-4292] - Do not manage secondary repos by default
-
-## v0.2.0
-
-Adding Amazon Linux support
-
-## v0.1.6
-
-Fixing up attribute values for EL6
-
-## v0.1.4
-
-Adding CHANGELOG.md
-
-## v0.1.0
-
-initial release
diff --git a/lc-gdn-chef/cookbooks/yum-epel/LICENSE b/lc-gdn-chef/cookbooks/yum-epel/LICENSE
deleted file mode 100644
index 8f71f43fee3f78649d238238cbde51e6d7055c82..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/yum-epel/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "{}"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright {yyyy} {name of copyright owner}
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-
diff --git a/lc-gdn-chef/cookbooks/yum-epel/README.md b/lc-gdn-chef/cookbooks/yum-epel/README.md
deleted file mode 100644
index bbd3312caff20eb180b7fa986784d1a7de0720df..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/yum-epel/README.md
+++ /dev/null
@@ -1,125 +0,0 @@
-# yum-epel Cookbook
-
-[Chef Supermarket](https://supermarket.chef.io/cookbooks/yum-epel)
-[GitHub Actions CI](https://github.com/sous-chefs/yum-epel/actions?query=workflow%3Aci)
-[Backers](#backers)
-[Sponsors](#sponsors)
-[Apache-2.0 License](https://opensource.org/licenses/Apache-2.0)
-
-Extra Packages for Enterprise Linux (or EPEL) is a Fedora Special Interest Group that creates, maintains, and manages a high-quality set of additional packages for Enterprise Linux, including, but not limited to, Red Hat Enterprise Linux (RHEL), CentOS, CentOS Stream, Scientific Linux (SL), and Oracle Linux (OL).
-
-The yum-epel cookbook takes over management of the default repositoryids shipped with epel-release.
-
-Below is a table showing which repositoryids we manage that are shipped by default via the epel-release package:
-
-| Repo ID | EL 7 | EL 8 | CentOS Stream 8 |
-| ------------------------------ | :--------------: | :--------------: | :--------------: |
-| epel |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
-| epel-debuginfo |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
-| epel-modular | :x: |:heavy_check_mark:|:heavy_check_mark:|
-| epel-modular-debuginfo | :x: |:heavy_check_mark:|:heavy_check_mark:|
-| epel-modular-source | :x: |:heavy_check_mark:|:heavy_check_mark:|
-| epel-next | :x: | :x: |:heavy_check_mark:|
-| epel-next-debuginfo | :x: | :x: |:heavy_check_mark:|
-| epel-next-source | :x: | :x: |:heavy_check_mark:|
-| epel-next-testing | :x: | :x: |:heavy_check_mark:|
-| epel-next-testing-debug | :x: | :x: |:heavy_check_mark:|
-| epel-next-testing-source | :x: | :x: |:heavy_check_mark:|
-| epel-playground | :x: |:heavy_check_mark:|:heavy_check_mark:|
-| epel-playground-debuginfo | :x: |:heavy_check_mark:|:heavy_check_mark:|
-| epel-playground-source | :x: |:heavy_check_mark:|:heavy_check_mark:|
-| epel-source |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
-| epel-testing |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
-| epel-testing-debuginfo |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
-| epel-testing-modular | :x: |:heavy_check_mark:|:heavy_check_mark:|
-| epel-testing-modular-debuginfo | :x: |:heavy_check_mark:|:heavy_check_mark:|
-| epel-testing-modular-source | :x: |:heavy_check_mark:|:heavy_check_mark:|
-| epel-testing-source |:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
-
-## Requirements
-
-### Platforms
-
-- RHEL/CentOS and derivatives
-
-### Chef
-
-- Chef 12.15+
-
-## Maintainers
-
-This cookbook is maintained by the Sous Chefs. The Sous Chefs are a community of Chef cookbook maintainers working together to maintain important cookbooks. If you’d like to know more please visit [sous-chefs.org](https://sous-chefs.org/) or come chat with us on the Chef Community Slack in [#sous-chefs](https://chefcommunity.slack.com/messages/C2V7B88SF).
-
-### Cookbooks
-
-- none
-
-## Attributes
-
-See individual repository attribute files for defaults.
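-
-Each managed repository exposes the same attribute keys under `node['yum'][<repositoryid>]`. The snippet below is an illustrative sketch of that shape (values are examples, not the shipped defaults); see each repository's attribute file for its actual values.
-
-```ruby
-# Illustrative sketch of the per-repository attribute keys this cookbook uses.
-# The values shown are examples only.
-default['yum']['epel']['repositoryid'] = 'epel'
-default['yum']['epel']['description']  = 'Extra Packages for Enterprise Linux - $basearch'
-default['yum']['epel']['mirrorlist']   = 'https://mirrors.fedoraproject.org/mirrorlist?repo=epel-8&arch=$basearch'
-default['yum']['epel']['gpgkey']       = 'https://download.fedoraproject.org/pub/epel/RPM-GPG-KEY-EPEL-8'
-default['yum']['epel']['gpgcheck']     = true
-default['yum']['epel']['enabled']      = true
-default['yum']['epel']['managed']      = true   # whether this cookbook writes the repo config at all
-default['yum']['epel']['make_cache']   = true
-```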
-
-## Recipes
-
-- `yum-epel::default` Generates `yum_repository` configs for the standard EPEL repositories. By default the `epel` repository is enabled. For CentOS Stream, the [epel-next](https://docs.fedoraproject.org/en-US/epel/#what_is_epel_next) repository is also enabled.
-
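-For each repository that is listed in `node['yum-epel']['repos']` and marked `managed`, the default recipe maps its attributes onto a `yum_repository` resource. A rough sketch of the effect for a single repository (property values are illustrative, not the shipped defaults):
-
-```ruby
-# Rough sketch of what the default recipe renders for one managed repo.
-yum_repository 'epel' do
-  description 'Extra Packages for Enterprise Linux 8 - $basearch'
-  mirrorlist  'https://mirrors.fedoraproject.org/mirrorlist?repo=epel-8&arch=$basearch'
-  gpgkey      'https://download.fedoraproject.org/pub/epel/RPM-GPG-KEY-EPEL-8'
-  gpgcheck    true
-  enabled     true
-  make_cache  true
-end
-```
-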
-## Usage Example
-
-To disable the epel repository through a Role or Environment definition
-
-```ruby
-default_attributes(
-  :yum => {
-    :epel => {
-      :enabled => false
-    }
-  }
-)
-```
-
-Uncommonly used repositoryids are not managed by default. This speeds up integration testing pipelines by avoiding yum cache builds that nobody cares about. To enable the epel-testing repository with a wrapper cookbook, place the following in a recipe:
-
-```ruby
-node.default['yum']['epel-testing']['enabled'] = true
-node.default['yum']['epel-testing']['managed'] = true
-include_recipe 'yum-epel'
-```
-
-## More Examples
-
-Point the epel repositories at an internally hosted server.
-
-```ruby
-node.default['yum']['epel']['enabled'] = true
-node.default['yum']['epel']['mirrorlist'] = nil
-node.default['yum']['epel']['baseurl'] = 'https://internal.example.com/centos/7/os/x86_64'
-node.default['yum']['epel']['sslverify'] = false
-
-include_recipe 'yum-epel'
-```
-
-## Contributors
-
-This project exists thanks to all the people who [contribute](https://opencollective.com/sous-chefs/contributors.svg?width=890&button=false).
-
-### Backers
-
-Thank you to all our backers!
-
-### Sponsors
-
-Support this project by becoming a sponsor. Your logo will show up here with a link to your website.
-
diff --git a/lc-gdn-chef/cookbooks/yum-epel/attributes/default.rb b/lc-gdn-chef/cookbooks/yum-epel/attributes/default.rb
deleted file mode 100644
index 8d32a0858c17886fd15627af7a39415ac7c7d66d..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/yum-epel/attributes/default.rb
+++ /dev/null
@@ -1,28 +0,0 @@
-default['yum-epel']['repos'] =
- value_for_platform(
- %w(almalinux redhat centos oracle rocky) => {
- '>= 8.0' => epel_8_repos,
- '~> 7.0' =>
- %w(
- epel
- epel-debuginfo
- epel-source
- epel-testing
- epel-testing-debuginfo
- epel-testing-source
- ),
- },
- 'amazon' => {
- 'default' =>
- %w(
- epel
- epel-debuginfo
- epel-source
- epel-testing
- epel-testing-debuginfo
- epel-testing-source
- ),
- },
- # No-op on non-yum systems
- 'default' => []
- )
diff --git a/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-debuginfo.rb b/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-debuginfo.rb
deleted file mode 100644
index 01b9b1e85d711fa63526c14bca0a80e2bc88b010..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-debuginfo.rb
+++ /dev/null
@@ -1,15 +0,0 @@
-default['yum']['epel-debuginfo']['repositoryid'] = 'epel-debuginfo'
-
-if platform?('amazon')
- default['yum']['epel-debuginfo']['description'] = 'Extra Packages for 7 - $basearch - Debug'
- default['yum']['epel-debuginfo']['mirrorlist'] = 'https://mirrors.fedoraproject.org/mirrorlist?repo=epel-debug-7&arch=$basearch'
- default['yum']['epel-debuginfo']['gpgkey'] = 'https://download.fedoraproject.org/pub/epel/RPM-GPG-KEY-EPEL-7'
-else
- default['yum']['epel-debuginfo']['description'] = "Extra Packages for #{node['platform_version'].to_i} - $basearch - Debug"
- default['yum']['epel-debuginfo']['mirrorlist'] = "https://mirrors.fedoraproject.org/mirrorlist?repo=epel-debug-#{node['platform_version'].to_i}&arch=$basearch"
- default['yum']['epel-debuginfo']['gpgkey'] = "https://download.fedoraproject.org/pub/epel/RPM-GPG-KEY-EPEL-#{node['platform_version'].to_i}"
-end
-default['yum']['epel-debuginfo']['gpgcheck'] = true
-default['yum']['epel-debuginfo']['enabled'] = false
-default['yum']['epel-debuginfo']['managed'] = false
-default['yum']['epel-debuginfo']['make_cache'] = true
diff --git a/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-modular-debuginfo.rb b/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-modular-debuginfo.rb
deleted file mode 100644
index 04e9f5214f67a980642ae76387aef0fac8b3716d..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-modular-debuginfo.rb
+++ /dev/null
@@ -1,8 +0,0 @@
-default['yum']['epel-modular-debuginfo']['repositoryid'] = 'epel-modular-debuginfo'
-default['yum']['epel-modular-debuginfo']['description'] = 'Extra Packages for Enterprise Linux Modular $releasever - $basearch - Debug'
-default['yum']['epel-modular-debuginfo']['mirrorlist'] = 'https://mirrors.fedoraproject.org/metalink?repo=epel-modular-debug-$releasever&arch=$basearch&infra=$infra&content=$contentdir'
-default['yum']['epel-modular-debuginfo']['gpgkey'] = 'https://download.fedoraproject.org/pub/epel/RPM-GPG-KEY-EPEL-8'
-default['yum']['epel-modular-debuginfo']['gpgcheck'] = true
-default['yum']['epel-modular-debuginfo']['enabled'] = false
-default['yum']['epel-modular-debuginfo']['managed'] = false
-default['yum']['epel-modular-debuginfo']['make_cache'] = true
diff --git a/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-modular-source.rb b/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-modular-source.rb
deleted file mode 100644
index 150e1ebfcbd4939b8c37eff8dfdbc9d42c486e0c..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-modular-source.rb
+++ /dev/null
@@ -1,8 +0,0 @@
-default['yum']['epel-modular-source']['repositoryid'] = 'epel-modular-source'
-default['yum']['epel-modular-source']['description'] = 'Extra Packages for Enterprise Linux Modular $releasever - $basearch - Source'
-default['yum']['epel-modular-source']['mirrorlist'] = 'https://mirrors.fedoraproject.org/metalink?repo=epel-modular-source-$releasever&arch=$basearch&infra=$infra&content=$contentdir'
-default['yum']['epel-modular-source']['gpgkey'] = 'https://download.fedoraproject.org/pub/epel/RPM-GPG-KEY-EPEL-8'
-default['yum']['epel-modular-source']['gpgcheck'] = true
-default['yum']['epel-modular-source']['enabled'] = false
-default['yum']['epel-modular-source']['managed'] = false
-default['yum']['epel-modular-source']['make_cache'] = true
diff --git a/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-modular.rb b/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-modular.rb
deleted file mode 100644
index 9fea914f54f4d255ce23bed494400ad7aa70fef1..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-modular.rb
+++ /dev/null
@@ -1,8 +0,0 @@
-default['yum']['epel-modular']['repositoryid'] = 'epel-modular'
-default['yum']['epel-modular']['description'] = 'Extra Packages for Enterprise Linux Modular $releasever - $basearch'
-default['yum']['epel-modular']['mirrorlist'] = 'https://mirrors.fedoraproject.org/metalink?repo=epel-modular-$releasever&arch=$basearch&infra=$infra&content=$contentdir'
-default['yum']['epel-modular']['gpgkey'] = 'https://download.fedoraproject.org/pub/epel/RPM-GPG-KEY-EPEL-8'
-default['yum']['epel-modular']['gpgcheck'] = true
-default['yum']['epel-modular']['enabled'] = false
-default['yum']['epel-modular']['managed'] = false
-default['yum']['epel-modular']['make_cache'] = true
diff --git a/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-next-debuginfo.rb b/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-next-debuginfo.rb
deleted file mode 100644
index dd4368760f060a3a282f813798d905f38c7bd246..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-next-debuginfo.rb
+++ /dev/null
@@ -1,11 +0,0 @@
-default['yum']['epel-next-debuginfo']['repositoryid'] = 'epel-next-debuginfo'
-default['yum']['epel-next-debuginfo']['description'] =
- "Extra Packages for #{node['platform_version'].to_i} - $basearch - Next - Debug"
-default['yum']['epel-next-debuginfo']['mirrorlist'] =
- "https://mirrors.fedoraproject.org/mirrorlist?repo=epel-next-debug-#{node['platform_version'].to_i}&arch=$basearch"
-default['yum']['epel-next-debuginfo']['gpgkey'] =
- "https://download.fedoraproject.org/pub/epel/RPM-GPG-KEY-EPEL-#{node['platform_version'].to_i}"
-default['yum']['epel-next-debuginfo']['gpgcheck'] = true
-default['yum']['epel-next-debuginfo']['enabled'] = false
-default['yum']['epel-next-debuginfo']['managed'] = false
-default['yum']['epel-next-debuginfo']['make_cache'] = true
diff --git a/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-next-source.rb b/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-next-source.rb
deleted file mode 100644
index e7c70a76dc37d9cd0cdf2fadec8f49a0c99cf3c0..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-next-source.rb
+++ /dev/null
@@ -1,11 +0,0 @@
-default['yum']['epel-next-source']['repositoryid'] = 'epel-next-source'
-default['yum']['epel-next-source']['description'] =
- "Extra Packages for #{node['platform_version'].to_i} $basearch - Next -Source"
-default['yum']['epel-next-source']['mirrorlist'] =
- "https://mirrors.fedoraproject.org/mirrorlist?repo=epel-next-source-#{node['platform_version'].to_i}&arch=$basearch"
-default['yum']['epel-next-source']['gpgkey'] =
- "https://download.fedoraproject.org/pub/epel/RPM-GPG-KEY-EPEL-#{node['platform_version'].to_i}"
-default['yum']['epel-next-source']['gpgcheck'] = true
-default['yum']['epel-next-source']['enabled'] = false
-default['yum']['epel-next-source']['managed'] = false
-default['yum']['epel-next-source']['make_cache'] = true
diff --git a/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-next-testing-debuginfo.rb b/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-next-testing-debuginfo.rb
deleted file mode 100644
index 118b636e4b50527b1c7dadbca559756cc4d203e3..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-next-testing-debuginfo.rb
+++ /dev/null
@@ -1,11 +0,0 @@
-default['yum']['epel-next-testing-debuginfo']['repositoryid'] = 'epel-next-testing-debuginfo'
-default['yum']['epel-next-testing-debuginfo']['description'] =
- "Extra Packages for #{node['platform_version'].to_i} - $basearch - Next - Testing Debug"
-default['yum']['epel-next-testing-debuginfo']['mirrorlist'] =
- "https://mirrors.fedoraproject.org/mirrorlist?repo=epel-testing-next-debug-#{node['platform_version'].to_i}&arch=$basearch"
-default['yum']['epel-next-testing-debuginfo']['gpgkey'] =
- "https://download.fedoraproject.org/pub/epel/RPM-GPG-KEY-EPEL-#{node['platform_version'].to_i}"
-default['yum']['epel-next-testing-debuginfo']['gpgcheck'] = true
-default['yum']['epel-next-testing-debuginfo']['enabled'] = false
-default['yum']['epel-next-testing-debuginfo']['managed'] = false
-default['yum']['epel-next-testing-debuginfo']['make_cache'] = true
diff --git a/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-next-testing-source.rb b/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-next-testing-source.rb
deleted file mode 100644
index 8548e540b787ec32be0f7ca05ddc3c099ff3a39c..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-next-testing-source.rb
+++ /dev/null
@@ -1,11 +0,0 @@
-default['yum']['epel-next-testing-source']['repositoryid'] = 'epel-next-testing-source'
-default['yum']['epel-next-testing-source']['description'] =
- "Extra Packages for #{node['platform_version'].to_i} - $basearch - Next - Testing Source"
-default['yum']['epel-next-testing-source']['mirrorlist'] =
- "https://mirrors.fedoraproject.org/mirrorlist?repo=testing-source-epel#{node['platform_version'].to_i}&arch=$basearch"
-default['yum']['epel-next-testing-source']['gpgkey'] =
- "https://download.fedoraproject.org/pub/epel/RPM-GPG-KEY-EPEL-#{node['platform_version'].to_i}"
-default['yum']['epel-next-testing-source']['gpgcheck'] = true
-default['yum']['epel-next-testing-source']['enabled'] = false
-default['yum']['epel-next-testing-source']['managed'] = false
-default['yum']['epel-next-testing-source']['make_cache'] = true
diff --git a/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-next-testing.rb b/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-next-testing.rb
deleted file mode 100644
index 18476e3fd2c3bc6f6b6288dc374d04bfb5787fc8..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-next-testing.rb
+++ /dev/null
@@ -1,11 +0,0 @@
-default['yum']['epel-next-testing']['repositoryid'] = 'epel-next-testing'
-default['yum']['epel-next-testing']['description'] =
- "Extra Packages for #{node['platform_version'].to_i} - $basearch - Next - Testing"
-default['yum']['epel-next-testing']['mirrorlist'] =
- "https://mirrors.fedoraproject.org/mirrorlist?repo=epel-testing-next-#{node['platform_version'].to_i}&arch=$basearch"
-default['yum']['epel-next-testing']['gpgkey'] =
- "https://download.fedoraproject.org/pub/epel/RPM-GPG-KEY-EPEL-#{node['platform_version'].to_i}"
-default['yum']['epel-next-testing']['gpgcheck'] = true
-default['yum']['epel-next-testing']['enabled'] = false
-default['yum']['epel-next-testing']['managed'] = false
-default['yum']['epel-next-testing']['make_cache'] = true
diff --git a/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-next.rb b/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-next.rb
deleted file mode 100644
index 246f5a346f9ef746a571f9ef4db957737c7278cf..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-next.rb
+++ /dev/null
@@ -1,10 +0,0 @@
-default['yum']['epel-next']['repositoryid'] = 'epel-next'
-default['yum']['epel-next']['gpgcheck'] = true
-default['yum']['epel-next']['description'] = 'Extra Packages for $releasever - Next - $basearch'
-default['yum']['epel-next']['mirrorlist'] =
- "https://mirrors.fedoraproject.org/mirrorlist?repo=epel-next-#{node['platform_version'].to_i}&arch=$basearch"
-default['yum']['epel-next']['gpgkey'] =
- "https://download.fedoraproject.org/pub/epel/RPM-GPG-KEY-EPEL-#{node['platform_version'].to_i}"
-default['yum']['epel-next']['enabled'] = true
-default['yum']['epel-next']['managed'] = true
-default['yum']['epel-next']['make_cache'] = true
diff --git a/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-playground-debuginfo.rb b/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-playground-debuginfo.rb
deleted file mode 100644
index d823b83d273adf31b78efc11f97464c5fd81b5de..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-playground-debuginfo.rb
+++ /dev/null
@@ -1,8 +0,0 @@
-default['yum']['epel-playground-debuginfo']['repositoryid'] = 'epel-playground-debuginfo'
-default['yum']['epel-playground-debuginfo']['description'] = 'Extra Packages for Enterprise Linux $releasever - Playground - $basearch - Debug'
-default['yum']['epel-playground-debuginfo']['mirrorlist'] = 'https://mirrors.fedoraproject.org/metalink?repo=playground-debug-epel$releasever&arch=$basearch&infra=$infra&content=$contentdir'
-default['yum']['epel-playground-debuginfo']['gpgkey'] = 'https://download.fedoraproject.org/pub/epel/RPM-GPG-KEY-EPEL-8'
-default['yum']['epel-playground-debuginfo']['gpgcheck'] = true
-default['yum']['epel-playground-debuginfo']['enabled'] = false
-default['yum']['epel-playground-debuginfo']['managed'] = false
-default['yum']['epel-playground-debuginfo']['make_cache'] = true
diff --git a/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-playground-source.rb b/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-playground-source.rb
deleted file mode 100644
index c0a0cb3371b364a00be7fe73a49d0656ff1a803e..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-playground-source.rb
+++ /dev/null
@@ -1,8 +0,0 @@
-default['yum']['epel-playground-source']['repositoryid'] = 'epel-playground-source'
-default['yum']['epel-playground-source']['description'] = 'Extra Packages for Enterprise Linux $releasever - Playground - $basearch - Source'
-default['yum']['epel-playground-source']['mirrorlist'] = 'https://mirrors.fedoraproject.org/metalink?repo=playground-source-epel$releasever&arch=$basearch&infra=$infra&content=$contentdir'
-default['yum']['epel-playground-source']['gpgkey'] = 'https://download.fedoraproject.org/pub/epel/RPM-GPG-KEY-EPEL-8'
-default['yum']['epel-playground-source']['gpgcheck'] = true
-default['yum']['epel-playground-source']['enabled'] = false
-default['yum']['epel-playground-source']['managed'] = false
-default['yum']['epel-playground-source']['make_cache'] = true
diff --git a/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-playground.rb b/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-playground.rb
deleted file mode 100644
index 3b78bb51c79576a709dc36775b0612f721687a58..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-playground.rb
+++ /dev/null
@@ -1,8 +0,0 @@
-default['yum']['epel-playground']['repositoryid'] = 'epel-playground'
-default['yum']['epel-playground']['description'] = 'Extra Packages for Enterprise Linux $releasever - Playground - $basearch'
-default['yum']['epel-playground']['mirrorlist'] = 'https://mirrors.fedoraproject.org/metalink?repo=playground-epel$releasever&arch=$basearch&infra=$infra&content=$contentdir'
-default['yum']['epel-playground']['gpgkey'] = 'https://download.fedoraproject.org/pub/epel/RPM-GPG-KEY-EPEL-8'
-default['yum']['epel-playground']['gpgcheck'] = true
-default['yum']['epel-playground']['enabled'] = false
-default['yum']['epel-playground']['managed'] = false
-default['yum']['epel-playground']['make_cache'] = true
diff --git a/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-source.rb b/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-source.rb
deleted file mode 100644
index 6c899890655c41aabbad84a2f734778d8e83556d..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-source.rb
+++ /dev/null
@@ -1,15 +0,0 @@
-default['yum']['epel-source']['repositoryid'] = 'epel-source'
-
-if platform?('amazon')
- default['yum']['epel-source']['description'] = 'Extra Packages for 7 - $basearch - Source'
- default['yum']['epel-source']['mirrorlist'] = 'https://mirrors.fedoraproject.org/mirrorlist?repo=epel-source-7&arch=$basearch'
- default['yum']['epel-source']['gpgkey'] = 'https://download.fedoraproject.org/pub/epel/RPM-GPG-KEY-EPEL-7'
-else
- default['yum']['epel-source']['description'] = "Extra Packages for #{node['platform_version'].to_i} - $basearch - Source"
- default['yum']['epel-source']['mirrorlist'] = "https://mirrors.fedoraproject.org/mirrorlist?repo=epel-source-#{node['platform_version'].to_i}&arch=$basearch"
- default['yum']['epel-source']['gpgkey'] = "https://download.fedoraproject.org/pub/epel/RPM-GPG-KEY-EPEL-#{node['platform_version'].to_i}"
-end
-default['yum']['epel-source']['gpgcheck'] = true
-default['yum']['epel-source']['enabled'] = false
-default['yum']['epel-source']['managed'] = false
-default['yum']['epel-source']['make_cache'] = true
diff --git a/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-testing-debuginfo.rb b/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-testing-debuginfo.rb
deleted file mode 100644
index 4330c333e83fc59e6b9aa4c5c0a2cbcf61dd98f1..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-testing-debuginfo.rb
+++ /dev/null
@@ -1,15 +0,0 @@
-default['yum']['epel-testing-debuginfo']['repositoryid'] = 'epel-testing-debuginfo'
-
-if platform?('amazon')
- default['yum']['epel-testing-debuginfo']['description'] = 'Extra Packages for 7 - $basearch - Testing Debug'
- default['yum']['epel-testing-debuginfo']['mirrorlist'] = 'https://mirrors.fedoraproject.org/mirrorlist?repo=testing-debug-epel7&arch=$basearch'
- default['yum']['epel-testing-debuginfo']['gpgkey'] = 'https://download.fedoraproject.org/pub/epel/RPM-GPG-KEY-EPEL-7'
-else
- default['yum']['epel-testing-debuginfo']['description'] = "Extra Packages for #{node['platform_version'].to_i} - $basearch - Testing Debug"
- default['yum']['epel-testing-debuginfo']['mirrorlist'] = "https://mirrors.fedoraproject.org/mirrorlist?repo=testing-debug-epel#{node['platform_version'].to_i}&arch=$basearch"
- default['yum']['epel-testing-debuginfo']['gpgkey'] = "https://download.fedoraproject.org/pub/epel/RPM-GPG-KEY-EPEL-#{node['platform_version'].to_i}"
-end
-default['yum']['epel-testing-debuginfo']['gpgcheck'] = true
-default['yum']['epel-testing-debuginfo']['enabled'] = false
-default['yum']['epel-testing-debuginfo']['managed'] = false
-default['yum']['epel-testing-debuginfo']['make_cache'] = true
diff --git a/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-testing-modular-debuginfo.rb b/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-testing-modular-debuginfo.rb
deleted file mode 100644
index fa938de1cef88fa591e795fb245b076f2859309a..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-testing-modular-debuginfo.rb
+++ /dev/null
@@ -1,8 +0,0 @@
-default['yum']['epel-testing-modular-debuginfo']['repositoryid'] = 'epel-testing-modular-debuginfo'
-default['yum']['epel-testing-modular-debuginfo']['description'] = 'Extra Packages for Enterprise Linux Modular $releasever - Testing - $basearch - Debug'
-default['yum']['epel-testing-modular-debuginfo']['mirrorlist'] = 'https://mirrors.fedoraproject.org/metalink?repo=testing-modular-debug-epel$releasever&arch=$basearch&infra=$infra&content=$contentdir'
-default['yum']['epel-testing-modular-debuginfo']['gpgkey'] = 'https://download.fedoraproject.org/pub/epel/RPM-GPG-KEY-EPEL-8'
-default['yum']['epel-testing-modular-debuginfo']['gpgcheck'] = true
-default['yum']['epel-testing-modular-debuginfo']['enabled'] = false
-default['yum']['epel-testing-modular-debuginfo']['managed'] = false
-default['yum']['epel-testing-modular-debuginfo']['make_cache'] = true
diff --git a/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-testing-modular-source.rb b/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-testing-modular-source.rb
deleted file mode 100644
index 4a4ae41e5f72f212965b67a6c4a1af95b14e9e11..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-testing-modular-source.rb
+++ /dev/null
@@ -1,8 +0,0 @@
-default['yum']['epel-testing-modular-source']['repositoryid'] = 'epel-testing-modular-source'
-default['yum']['epel-testing-modular-source']['description'] = 'Extra Packages for Enterprise Linux Modular $releasever - Testing - $basearch - Source'
-default['yum']['epel-testing-modular-source']['mirrorlist'] = 'https://mirrors.fedoraproject.org/metalink?repo=testing-modular-source-epel$releasever&arch=$basearch&infra=$infra&content=$contentdir'
-default['yum']['epel-testing-modular-source']['gpgkey'] = 'https://download.fedoraproject.org/pub/epel/RPM-GPG-KEY-EPEL-8'
-default['yum']['epel-testing-modular-source']['gpgcheck'] = true
-default['yum']['epel-testing-modular-source']['enabled'] = false
-default['yum']['epel-testing-modular-source']['managed'] = false
-default['yum']['epel-testing-modular-source']['make_cache'] = true
diff --git a/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-testing-modular.rb b/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-testing-modular.rb
deleted file mode 100644
index a707c65fd88a9a3baac03fa8ddde1c82aba8ebe5..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-testing-modular.rb
+++ /dev/null
@@ -1,8 +0,0 @@
-default['yum']['epel-testing-modular']['repositoryid'] = 'epel-testing-modular'
-default['yum']['epel-testing-modular']['description'] = 'Extra Packages for Enterprise Linux Modular $releasever - Testing - $basearch'
-default['yum']['epel-testing-modular']['mirrorlist'] = 'https://mirrors.fedoraproject.org/metalink?repo=testing-modular-epel$releasever&arch=$basearch&infra=$infra&content=$contentdir'
-default['yum']['epel-testing-modular']['gpgkey'] = 'https://download.fedoraproject.org/pub/epel/RPM-GPG-KEY-EPEL-8'
-default['yum']['epel-testing-modular']['gpgcheck'] = true
-default['yum']['epel-testing-modular']['enabled'] = false
-default['yum']['epel-testing-modular']['managed'] = false
-default['yum']['epel-testing-modular']['make_cache'] = true
diff --git a/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-testing-source.rb b/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-testing-source.rb
deleted file mode 100644
index 7da8d249e4c9fb7f6b655dd29510eee87acf2a17..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-testing-source.rb
+++ /dev/null
@@ -1,15 +0,0 @@
-default['yum']['epel-testing-source']['repositoryid'] = 'epel-testing-source'
-
-if platform?('amazon')
- default['yum']['epel-testing-source']['description'] = 'Extra Packages for 7 - $basearch - Testing Source'
- default['yum']['epel-testing-source']['mirrorlist'] = 'https://mirrors.fedoraproject.org/mirrorlist?repo=testing-source-epel7&arch=$basearch'
- default['yum']['epel-testing-source']['gpgkey'] = 'https://download.fedoraproject.org/pub/epel/RPM-GPG-KEY-EPEL-7'
-else
- default['yum']['epel-testing-source']['description'] = "Extra Packages for #{node['platform_version'].to_i} - $basearch - Testing Source"
- default['yum']['epel-testing-source']['mirrorlist'] = "https://mirrors.fedoraproject.org/mirrorlist?repo=testing-source-epel#{node['platform_version'].to_i}&arch=$basearch"
- default['yum']['epel-testing-source']['gpgkey'] = "https://download.fedoraproject.org/pub/epel/RPM-GPG-KEY-EPEL-#{node['platform_version'].to_i}"
-end
-default['yum']['epel-testing-source']['gpgcheck'] = true
-default['yum']['epel-testing-source']['enabled'] = false
-default['yum']['epel-testing-source']['managed'] = false
-default['yum']['epel-testing-source']['make_cache'] = true
diff --git a/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-testing.rb b/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-testing.rb
deleted file mode 100644
index ae3aa16a4862008e0c60f7fae3fee6db301075dc..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/yum-epel/attributes/epel-testing.rb
+++ /dev/null
@@ -1,15 +0,0 @@
-default['yum']['epel-testing']['repositoryid'] = 'epel-testing'
-
-if platform?('amazon')
- default['yum']['epel-testing']['description'] = 'Extra Packages for 7 - $basearch - Testing '
- default['yum']['epel-testing']['mirrorlist'] = 'https://mirrors.fedoraproject.org/mirrorlist?repo=testing-epel7&arch=$basearch'
- default['yum']['epel-testing']['gpgkey'] = 'https://download.fedoraproject.org/pub/epel/RPM-GPG-KEY-EPEL-7'
-else
- default['yum']['epel-testing']['description'] = "Extra Packages for #{node['platform_version'].to_i} - $basearch - Testing "
- default['yum']['epel-testing']['mirrorlist'] = "https://mirrors.fedoraproject.org/mirrorlist?repo=testing-epel#{node['platform_version'].to_i}&arch=$basearch"
- default['yum']['epel-testing']['gpgkey'] = "https://download.fedoraproject.org/pub/epel/RPM-GPG-KEY-EPEL-#{node['platform_version'].to_i}"
-end
-default['yum']['epel-testing']['gpgcheck'] = true
-default['yum']['epel-testing']['enabled'] = false
-default['yum']['epel-testing']['managed'] = false
-default['yum']['epel-testing']['make_cache'] = true
diff --git a/lc-gdn-chef/cookbooks/yum-epel/attributes/epel.rb b/lc-gdn-chef/cookbooks/yum-epel/attributes/epel.rb
deleted file mode 100644
index 119296ddc84eebfa55f9f1fc2c1ca4810326927b..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/yum-epel/attributes/epel.rb
+++ /dev/null
@@ -1,23 +0,0 @@
-default['yum']['epel']['repositoryid'] = 'epel'
-default['yum']['epel']['gpgcheck'] = true
-case node['kernel']['machine']
-when 'armv7l', 'armv7hl'
- default['yum']['epel']['baseurl'] = 'https://armv7.dev.centos.org/repodir/epel-pass-1/'
- default['yum']['epel']['gpgcheck'] = false
-when 's390x'
- default['yum']['epel']['baseurl'] = 'https://kojipkgs.fedoraproject.org/rhel/rc/7/Server/s390x/os/'
- default['yum']['epel']['gpgkey'] = 'https://kojipkgs.fedoraproject.org/rhel/rc/7/Server/s390x/os/RPM-GPG-KEY-redhat-release'
-else
- if platform?('amazon')
- default['yum']['epel']['description'] = 'Extra Packages for 7 - $basearch'
- default['yum']['epel']['mirrorlist'] = 'https://mirrors.fedoraproject.org/mirrorlist?repo=epel-7&arch=$basearch'
- default['yum']['epel']['gpgkey'] = 'https://download.fedoraproject.org/pub/epel/RPM-GPG-KEY-EPEL-7'
- else
- default['yum']['epel']['description'] = "Extra Packages for #{node['platform_version'].to_i} - $basearch"
- default['yum']['epel']['mirrorlist'] = "https://mirrors.fedoraproject.org/mirrorlist?repo=epel-#{node['platform_version'].to_i}&arch=$basearch"
- default['yum']['epel']['gpgkey'] = "https://download.fedoraproject.org/pub/epel/RPM-GPG-KEY-EPEL-#{node['platform_version'].to_i}"
- end
-end
-default['yum']['epel']['enabled'] = true
-default['yum']['epel']['managed'] = true
-default['yum']['epel']['make_cache'] = true
diff --git a/lc-gdn-chef/cookbooks/yum-epel/chefignore b/lc-gdn-chef/cookbooks/yum-epel/chefignore
deleted file mode 100644
index cc170ea79ed8bde58bbb77030c0c2ab70b959c21..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/yum-epel/chefignore
+++ /dev/null
@@ -1,115 +0,0 @@
-# Put files/directories that should be ignored in this file when uploading
-# to a Chef Infra Server or Supermarket.
-# Lines that start with '# ' are comments.
-
-# OS generated files #
-######################
-.DS_Store
-ehthumbs.db
-Icon?
-nohup.out
-Thumbs.db
-.envrc
-
-# EDITORS #
-###########
-.#*
-.project
-.settings
-*_flymake
-*_flymake.*
-*.bak
-*.sw[a-z]
-*.tmproj
-*~
-\#*
-REVISION
-TAGS*
-tmtags
-.vscode
-.editorconfig
-
-## COMPILED ##
-##############
-*.class
-*.com
-*.dll
-*.exe
-*.o
-*.pyc
-*.so
-*/rdoc/
-a.out
-mkmf.log
-
-# Testing #
-###########
-.circleci/*
-.codeclimate.yml
-.delivery/*
-.foodcritic
-.kitchen*
-.mdlrc
-.overcommit.yml
-.rspec
-.rubocop.yml
-.travis.yml
-.watchr
-.yamllint
-azure-pipelines.yml
-Dangerfile
-examples/*
-features/*
-Guardfile
-kitchen.yml*
-mlc_config.json
-Procfile
-Rakefile
-spec/*
-test/*
-
-# SCM #
-#######
-.git
-.gitattributes
-.gitconfig
-.github/*
-.gitignore
-.gitkeep
-.gitmodules
-.svn
-*/.bzr/*
-*/.git
-*/.hg/*
-*/.svn/*
-
-# Berkshelf #
-#############
-Berksfile
-Berksfile.lock
-cookbooks/*
-tmp
-
-# Bundler #
-###########
-vendor/*
-Gemfile
-Gemfile.lock
-
-# Policyfile #
-##############
-Policyfile.rb
-Policyfile.lock.json
-
-# Documentation #
-#############
-CODE_OF_CONDUCT*
-CONTRIBUTING*
-documentation/*
-TESTING*
-UPGRADING*
-
-# Vagrant #
-###########
-.vagrant
-Vagrantfile
diff --git a/lc-gdn-chef/cookbooks/yum-epel/kitchen.dokken.yml b/lc-gdn-chef/cookbooks/yum-epel/kitchen.dokken.yml
deleted file mode 100644
index 4b33f8aad7ab0eb8b752bf30b2cdb04a14567600..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/yum-epel/kitchen.dokken.yml
+++ /dev/null
@@ -1,51 +0,0 @@
-driver:
- name: dokken
- privileged: true # because Docker and SystemD
- chef_version: <%= ENV['CHEF_VERSION'] || 'current' %>
- chef_license: accept-no-persist
-
-transport:
- name: dokken
-
-provisioner:
- name: dokken
- deprecations_as_errors: true
-
-verifier:
- name: inspec
-
-platforms:
- - name: almalinux-8
- driver:
- image: dokken/almalinux-8
- pid_one_command: /usr/lib/systemd/systemd
-
- - name: amazonlinux-2
- driver:
- image: dokken/amazonlinux-2
- pid_one_command: /usr/lib/systemd/systemd
-
- - name: centos-7
- driver:
- image: dokken/centos-7
- pid_one_command: /usr/lib/systemd/systemd
-
- - name: centos-stream-8
- driver:
- image: dokken/centos-stream-8
- pid_one_command: /usr/lib/systemd/systemd
-
- - name: oraclelinux-7
- driver:
- image: dokken/oraclelinux-7
- pid_one_command: /usr/lib/systemd/systemd
-
- - name: oraclelinux-8
- driver:
- image: dokken/oraclelinux-8
- pid_one_command: /usr/lib/systemd/systemd
-
- - name: rockylinux-8
- driver:
- image: dokken/rockylinux-8
- pid_one_command: /usr/lib/systemd/systemd
diff --git a/lc-gdn-chef/cookbooks/yum-epel/libraries/helpers.rb b/lc-gdn-chef/cookbooks/yum-epel/libraries/helpers.rb
deleted file mode 100644
index 8898dad98fb8b1db409d6269d326922a7f291b42..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/yum-epel/libraries/helpers.rb
+++ /dev/null
@@ -1,46 +0,0 @@
-module YumEpel
- module Cookbook
- module Helpers
- def epel_8_repos
- repos = %w(
- epel
- epel-debuginfo
- epel-modular
- epel-modular-debuginfo
- epel-modular-source
- epel-playground
- epel-playground-debuginfo
- epel-playground-source
- epel-source
- epel-testing
- epel-testing-debuginfo
- epel-testing-modular
- epel-testing-modular-debuginfo
- epel-testing-modular-source
- epel-testing-source
- )
-
- repos.concat(
- %w(
- epel-next
- epel-next-debuginfo
- epel-next-source
- epel-next-testing
- epel-next-testing-debuginfo
- epel-next-testing-source
- )
- ) if yum_epel_centos_stream?
-
- repos
- end
-
- private
-
- def yum_epel_centos_stream?
- respond_to?(:centos_stream_platform?) && centos_stream_platform?
- end
- end
- end
-end
-# Needed so these helpers can be used in attributes/
-Chef::Node.include ::YumEpel::Cookbook::Helpers
diff --git a/lc-gdn-chef/cookbooks/yum-epel/metadata.json b/lc-gdn-chef/cookbooks/yum-epel/metadata.json
deleted file mode 100644
index 0d76f72ec88726061dfedd477683a7fe7d3ac1a5..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/yum-epel/metadata.json
+++ /dev/null
@@ -1,41 +0,0 @@
-{
- "name": "yum-epel",
- "description": "Installs and configures the EPEL Yum repository",
- "long_description": "",
- "maintainer": "Sous Chefs",
- "maintainer_email": "help@sous-chefs.org",
- "license": "Apache-2.0",
- "platforms": {
- "amazon": ">= 0.0.0",
- "centos": ">= 0.0.0",
- "oracle": ">= 0.0.0",
- "redhat": ">= 0.0.0",
- "scientific": ">= 0.0.0",
- "zlinux": ">= 0.0.0"
- },
- "dependencies": {
-
- },
- "providing": {
-
- },
- "recipes": {
-
- },
- "version": "4.4.1",
- "source_url": "https://github.com/sous-chefs/yum-epel",
- "issues_url": "https://github.com/sous-chefs/yum-epel/issues",
- "privacy": false,
- "chef_versions": [
- [
- ">= 12.15"
- ]
- ],
- "ohai_versions": [
-
- ],
- "gems": [
-
- ],
- "eager_load_libraries": true
-}
diff --git a/lc-gdn-chef/cookbooks/yum-epel/metadata.rb b/lc-gdn-chef/cookbooks/yum-epel/metadata.rb
deleted file mode 100644
index 5c868aebb6e3d317a0d9e1a7e004ae23638ed425..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/yum-epel/metadata.rb
+++ /dev/null
@@ -1,16 +0,0 @@
-name 'yum-epel'
-maintainer 'Sous Chefs'
-maintainer_email 'help@sous-chefs.org'
-license 'Apache-2.0'
-description 'Installs and configures the EPEL Yum repository'
-version '4.4.1'
-source_url 'https://github.com/sous-chefs/yum-epel'
-issues_url 'https://github.com/sous-chefs/yum-epel/issues'
-chef_version '>= 12.15'
-
-supports 'amazon'
-supports 'centos'
-supports 'oracle'
-supports 'redhat'
-supports 'scientific'
-supports 'zlinux'
diff --git a/lc-gdn-chef/cookbooks/yum-epel/recipes/default.rb b/lc-gdn-chef/cookbooks/yum-epel/recipes/default.rb
deleted file mode 100644
index f8aa7ac1c502a10d7b52a0b56d7b1c2a1f896979..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/cookbooks/yum-epel/recipes/default.rb
+++ /dev/null
@@ -1,27 +0,0 @@
-#
-# Author:: Sean OMeara ()
-# Cookbook:: yum-epel
-# Recipe:: default
-#
-# Copyright:: 2013-2019, Chef Software, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-node['yum-epel']['repos'].each do |repo|
- next unless node['yum'][repo]['managed']
- yum_repository repo do
- node['yum'][repo].each do |config, value|
- send(config.to_sym, value) unless value.nil? || config == 'managed'
- end
- end
-end
diff --git a/lc-gdn-chef/dist.sh b/lc-gdn-chef/dist.sh
deleted file mode 100755
index 3b2a8a295e6379e8db444c6c1c7c774510eaf4aa..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/dist.sh
+++ /dev/null
@@ -1,9 +0,0 @@
-#!/bin/sh
-
-tar czfv leighco.tar.gz cookbooks
-scp leighco.tar.gz web0:/var/www/lc/html/chef/
-
-
-
-
-
diff --git a/lc-gdn-chef/leighco.rb b/lc-gdn-chef/leighco.rb
deleted file mode 100644
index b828da7ce4817a5d9e7319060f5c2547f3e02b20..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/leighco.rb
+++ /dev/null
@@ -1 +0,0 @@
-recipe_url "https://leigh-co.com/chef/leighco.tar.gz"
\ No newline at end of file
diff --git a/lc-gdn-chef/mx1.json b/lc-gdn-chef/mx1.json
deleted file mode 100644
index 0b3836171585395fc2a94981870e8136b65d18ef..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/mx1.json
+++ /dev/null
@@ -1,14 +0,0 @@
-{
- "name": "or0-r0",
- "chef_environment": "_default",
- "normal": {
- },
- "run_list": [
- "recipe[lc::hostname]",
- "recipe[lc::ssh]",
- "recipe[firewall::disable_firewall]",
- "recipe[selinux::disabled]",
- "recipe[lc-gdn-minecraft::neweden-relay]",
- "recipe[lc-gdn-mail::mx0-relay]"
- ]
-}
\ No newline at end of file
diff --git a/lc-gdn-chef/test1.json b/lc-gdn-chef/test1.json
deleted file mode 100644
index 81ca7f32b3e52ac1482b1ab37e4ad6e67d937ee4..0000000000000000000000000000000000000000
--- a/lc-gdn-chef/test1.json
+++ /dev/null
@@ -1,12 +0,0 @@
-{
- "name": "test1",
- "chef_environment": "_default",
- "normal": {
- },
- "run_list": [
- "recipe[lc::hostname]",
- "recipe[lc::ssh]",
- "recipe[firewall::disable_firewall]",
- "recipe[selinux::disabled]"
- ]
-}
\ No newline at end of file
diff --git a/lc-gdn-ipbeacon-svc/build.gradle b/lc-gdn-ipbeacon-svc/build.gradle
deleted file mode 100644
index 18518ca46fd894c459601d0a143353678b22f04c..0000000000000000000000000000000000000000
--- a/lc-gdn-ipbeacon-svc/build.gradle
+++ /dev/null
@@ -1,46 +0,0 @@
-plugins {
- id 'java'
- id 'application'
- id "com.palantir.docker" version "0.33.0"
-}
-
-group 'leighco'
-version '1.1'
-
-repositories {
- mavenCentral()
-}
-
-dependencies {
- implementation project(':lc-esp-sdk')
- testImplementation 'org.junit.jupiter:junit-jupiter-api:5.7.0'
- testRuntimeOnly 'org.junit.jupiter:junit-jupiter-engine:5.7.0'
-}
-
-test {
- useJUnitPlatform()
-}
-
-application {
- mainClass = 'lc.gdn.ipbeacon.service.IPBeaconService'
-}
-
-
-docker {
- name "docker.leigh-co.com/${project.name}:${project.version}"
- files tasks.distTar.outputs
-}
-
-task dockerfile() {
- doLast {
- new File(projectDir, 'Dockerfile').text = """FROM docker.leigh-co.com/openjdk:17-alpine
-RUN mkdir /app
-COPY ${project.name}-${project.version}.tar /dist.tar
-RUN tar xfv /dist.tar
-RUN rm /dist.tar
-ENTRYPOINT ["/${project.name}-${project.version}/bin/${project.name}"]
-"""
- }
-}
-
-
diff --git a/lc-gdn-ipbeacon-svc/src/main/java/lc/gdn/ipbeacon/service/IPBeaconService.java b/lc-gdn-ipbeacon-svc/src/main/java/lc/gdn/ipbeacon/service/IPBeaconService.java
deleted file mode 100644
index deb0c54951435270fe669e35ab36d97c8493480d..0000000000000000000000000000000000000000
--- a/lc-gdn-ipbeacon-svc/src/main/java/lc/gdn/ipbeacon/service/IPBeaconService.java
+++ /dev/null
@@ -1,76 +0,0 @@
-package lc.gdn.ipbeacon.service;
-
-import lc.esp.sdk.ESPAddress;
-import lc.esp.sdk.ESPClient;
-import lc.esp.sdk.ESPProducer;
-import lc.esp.sdk.telemetry.TelemetryFrame;
-import lc.esp.sdk.telemetry.TelemetrySymbol;
-import lc.mecha.http.SimpleHttpClient;
-import lc.mecha.json.JSONObject;
-import lc.mecha.log.MechaLogger;
-import lc.mecha.log.MechaLoggerFactory;
-import lc.mecha.util.BasicallyDangerous;
-import lc.mecha.util.UniversalJob;
-import org.apache.commons.lang.StringUtils;
-
-import java.io.Serializable;
-import java.security.KeyManagementException;
-import java.security.KeyStoreException;
-import java.security.NoSuchAlgorithmException;
-import java.time.Instant;
-import java.util.Locale;
-
-import static java.lang.Thread.sleep;
-
-/**
- * Determine the public ip for this installation and periodically broadcast it to ESP.
- *
- * Environment variables:
- *
- * LC_EST: The site-name for this beacon.
- *
- * @author Alex Leigh
- */
-public class IPBeaconService extends BasicallyDangerous {
- public static final String ENV_SITE = "LC_EST";
- private static final MechaLogger logger = MechaLoggerFactory.getLogger(IPBeaconService.class);
- private final ESPClient esp = new ESPClient();
- private final SimpleHttpClient http = new SimpleHttpClient();
- private String est;
-
- public IPBeaconService() throws KeyStoreException, NoSuchAlgorithmException, KeyManagementException {
- est = System.getenv(ENV_SITE);
- if (StringUtils.isEmpty(est)) {
- logger.error(ENV_SITE + " must be set.");
- System.exit(UniversalJob.RET_BADARGS);
- }
- est = est.toUpperCase(Locale.ROOT);
- }
-
- public static void main(String[] args) throws Exception {
- IPBeaconService beacon = new IPBeaconService();
- beacon.run();
- }
-
- @Override
- public void runDangerously() throws Exception {
- esp.start();
- ESPAddress addr = new ESPAddress("topic://gdn.global.sys.mon.tlm");
- ESPProducer prod = esp.createSession().createProducer(addr);
-
- //noinspection InfiniteLoopStatement
- while (true) {
- JSONObject resJson = http.getJson("https://api.my-ip.io/ip.json");
- logger.info("Found IP. {}", resJson.get("ip"));
-
- TelemetryFrame frame = new TelemetryFrame(Instant.now());
- frame.getTags().put("est", est);
- frame.getTags().put("type", "vpn");
- frame.getSymbols().add(new TelemetrySymbol("ip.public", (Serializable) resJson.get("ip"), true));
- logger.info("Created frame: {}", frame);
- prod.send(frame.toMessage());
-
- sleep(60 * 10 * 1000);
- }
- }
-}
diff --git a/lc-hello-app/build.gradle b/lc-hello-app/build.gradle
deleted file mode 100644
index 3f48e21cf85f894af3bdf74345c536c6b1de857c..0000000000000000000000000000000000000000
--- a/lc-hello-app/build.gradle
+++ /dev/null
@@ -1,25 +0,0 @@
-plugins {
- id 'java'
- id 'application'
-}
-
-group 'leighco'
-version '1.0'
-
-sourceCompatibility = JavaVersion.VERSION_17
-targetCompatibility = JavaVersion.VERSION_17
-
-repositories {
- mavenCentral()
-}
-
-dependencies {
- implementation project(':lc-esp-sdk')
- implementation project(':lc-eo-schema')
- testImplementation 'org.junit.jupiter:junit-jupiter-api:5.7.0'
- testRuntimeOnly 'org.junit.jupiter:junit-jupiter-engine:5.7.0'
-}
-
-test {
- useJUnitPlatform()
-}
\ No newline at end of file
diff --git a/lc-hello-app/src/main/java/lc/hello/app/HelloApp.java b/lc-hello-app/src/main/java/lc/hello/app/HelloApp.java
deleted file mode 100644
index 57f4e23396132f119fa4349dc864967fc1981f77..0000000000000000000000000000000000000000
--- a/lc-hello-app/src/main/java/lc/hello/app/HelloApp.java
+++ /dev/null
@@ -1,39 +0,0 @@
-package lc.hello.app;
-
-import lc.eo.EO;
-import lc.esp.sdk.*;
-import lc.hello.schema.v1.RequestElementDAO;
-import lc.mecha.log.MechaLogger;
-import lc.mecha.log.MechaLoggerFactory;
-
-import java.util.UUID;
-
-public class HelloApp {
- private static final MechaLogger logger = MechaLoggerFactory.getLogger(HelloApp.class);
-
- public static void main(String[] args) throws Exception {
- try (ESPClient esp = new ESPClient()) {
- esp.start();
-
- ESPAddress src = new ESPAddress("lc", "dev", "hello-client",
- UUID.randomUUID().toString(), ESPAddressClass.QUEUE, ESPMessageClass.COMMAND);
- ESPAddress dst = new ESPAddress("queue://lc.dev.example.hello.cmd");
-
- try (ESPSession session = esp.createSession()) {
- ESPConsumer consumer = session.createConsumer(src);
- ESPProducer producer = session.createProducer(dst);
-
- EO req = RequestElementDAO.create();
- RequestElementDAO.setName(req, "Alex");
-
- ESPMessage request = new ESPMessage(req);
- request.setCorrelationId(UUID.randomUUID().toString());
- request.setReplyTo(src);
- producer.send(request);
- logger.info("Waiting for reply...");
- ESPMessage msg = consumer.receive();
- logger.info("Received reply: {}", msg);
- }
- }
- }
-}
diff --git a/lc-hello-app/src/main/java/lc/hello/app/SchemaGenerator.java b/lc-hello-app/src/main/java/lc/hello/app/SchemaGenerator.java
deleted file mode 100644
index 61df323f5258657e852631c3b5e67054aac50ef6..0000000000000000000000000000000000000000
--- a/lc-hello-app/src/main/java/lc/hello/app/SchemaGenerator.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- * Copyright (c) 2004-2017, by Alex Leigh.
- * All rights reserved.
- *
- * THIS IS UNPUBLISHED PROPRIETARY SOURCE CODE
- * The copyright notice above does not evidence any
- * actual or intended publication of such source code.
- */
-
-package lc.hello.app;
-
-import lc.eo.EO;
-import lc.eo.schema.DAOGenerator;
-import lc.eo.schema.ElementElementDAO;
-import lc.eo.schema.SchemaElementDAO;
-import lc.eo.schema.util.AttributeUtil;
-import lc.mecha.log.MechaLogger;
-import lc.mecha.log.MechaLoggerFactory;
-import lc.mecha.util.UniversalJob;
-
-import java.io.File;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.io.PrintWriter;
-
-/**
- * This class generates the schema^2 (that is, the schema for the schema). In turn, this schema can be used to generate
- * the DAO objects for the schema components (Element, Loop, Field, etc). These schema DAOs are used throughout
- * the ESP codebase.
- *
- * @author Alex Leigh
- */
-public final class SchemaGenerator {
- private static final MechaLogger logger = MechaLoggerFactory.getLogger(SchemaGenerator.class);
-
- // Observe the procedures of a general alert
-
- public static EO generate() {
- final EO schema = lc.eo.schema.util.SchemaUtil.create("hello.schema.v1");
-
- final EO helloRequest = ElementElementDAO.create();
- SchemaElementDAO.getElements(schema).add(helloRequest);
- ElementElementDAO.setEoType(helloRequest, "hello.schema.v1.request");
- ElementElementDAO.getAttributes(helloRequest).add(AttributeUtil.create("name", "Name"));
-
- final EO helloResponse = ElementElementDAO.create();
- SchemaElementDAO.getElements(schema).add(helloResponse);
- ElementElementDAO.setEoType(helloResponse, "hello.schema.v1.response");
- ElementElementDAO.getAttributes(helloResponse).add(AttributeUtil.create("message", "Message"));
-
- return schema;
- }
-
- public static void main(final String... args) throws IOException {
- UniversalJob.banner(logger, "Apotheosis mk3", "2014-2022 Alex Leigh");
- final EO schema = generate();
- final DAOGenerator gen = new DAOGenerator(new File("lc-hello-app/src/main/java"));
- gen.generateJava("lc.hello.schema.v1", schema);
- try (PrintWriter pw = new PrintWriter(new FileWriter(new File("out.js")))) {
- gen.generateEcma5(pw, "lc.hello.schema.v1", schema);
- }
- logger.info("Generated schema: {}", schema);
- }
-}
diff --git a/lc-hello-app/src/main/java/lc/hello/schema/v1/RequestElementDAO.java b/lc-hello-app/src/main/java/lc/hello/schema/v1/RequestElementDAO.java
deleted file mode 100644
index 8f4bb96df7afa40b59d43d4d208f804a7ae6b96c..0000000000000000000000000000000000000000
--- a/lc-hello-app/src/main/java/lc/hello/schema/v1/RequestElementDAO.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
- * Copyright (c) 2014-2022, by Alex Leigh.
- * All rights reserved.
- *
- * THIS IS UNPUBLISHED PROPRIETARY SOURCE CODE
- * The copyright notice above does not evidence any
- * actual or intended publication of such source code.
- */
-
-package lc.hello.schema.v1;
-
-import lc.eo.EO;
-
-/**
- * This is a dynamically generated DAO class for accessing objects within an ESP graph. This class
- * has been generated by DAOGenerator and should not be modified.
- *
- * @author DAOGenerator (synthetically generated class)
- */
-public final class RequestElementDAO {
- public static final String API_TYPE = "hello.schema.v1.request";
-
- public static EO create() {
- EO eo = new EO(API_TYPE);
- return eo;
- }
-
- public static boolean assertType(final EO eo) {
- return eo.getType().equals(API_TYPE);
- }
-
- public static final String KEY_NAME = "name";
-
- public static String apiType(final EO eo) {
- return eo.getType();
- }
-
-
- /* EO{typeName='lc.eo.schema.Attribute', data={eoType=name, label=EOLoop{array=[EO{typeName='lc.eo.schema.Text', data={text=Name, locale=en}, meta=null}]} , type=string}, meta=null} */
-
-
- public static java.lang.String getName(EO eo) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- return eo.getValueString(KEY_NAME);
- }
-
- public static void setName(final EO eo, java.lang.String value) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.setValue(KEY_NAME, value);
- }
-
- public static void setNameLabel(final EO eo, final String label) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.setLabel(KEY_NAME, label);
- }
-
- public static java.lang.String getLabelOrValueName(final EO eo) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- return eo.getLabelOrValue(KEY_NAME);
- }
-
- public static void setIfUnsetName(final EO eo, java.lang.String value) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.setValueIfUnset(KEY_NAME, value);
-}
-
-
-}
\ No newline at end of file
diff --git a/lc-hello-app/src/main/java/lc/hello/schema/v1/ResponseElementDAO.java b/lc-hello-app/src/main/java/lc/hello/schema/v1/ResponseElementDAO.java
deleted file mode 100644
index e58d5b95d76de325daa9b12c4d62e8bbbbf596cd..0000000000000000000000000000000000000000
--- a/lc-hello-app/src/main/java/lc/hello/schema/v1/ResponseElementDAO.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
- * Copyright (c) 2014-2022, by Alex Leigh.
- * All rights reserved.
- *
- * THIS IS UNPUBLISHED PROPRIETARY SOURCE CODE
- * The copyright notice above does not evidence any
- * actual or intended publication of such source code.
- */
-
-package lc.hello.schema.v1;
-
-import lc.eo.EO;
-
-/**
- * This is a dynamically generated DAO class for accessing objects within an ESP graph. This class
- * has been generated by DAOGenerator and should not be modified.
- *
- * @author DAOGenerator (synthetically generated class)
- */
-public final class ResponseElementDAO {
- public static final String API_TYPE = "hello.schema.v1.response";
-
- public static EO create() {
- EO eo = new EO(API_TYPE);
- return eo;
- }
-
- public static boolean assertType(final EO eo) {
- return eo.getType().equals(API_TYPE);
- }
-
- public static final String KEY_MESSAGE = "message";
-
- public static String apiType(final EO eo) {
- return eo.getType();
- }
-
-
- /* EO{typeName='lc.eo.schema.Attribute', data={eoType=message, label=EOLoop{array=[EO{typeName='lc.eo.schema.Text', data={text=Message, locale=en}, meta=null}]} , type=string}, meta=null} */
-
-
- public static java.lang.String getMessage(EO eo) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- return eo.getValueString(KEY_MESSAGE);
- }
-
- public static void setMessage(final EO eo, java.lang.String value) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.setValue(KEY_MESSAGE, value);
- }
-
- public static void setMessageLabel(final EO eo, final String label) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.setLabel(KEY_MESSAGE, label);
- }
-
- public static void getLabelOrValueMessage(final EO eo, java.lang.String value) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.getLabelOrValue(KEY_MESSAGE);
- }
-
- public static void setIfUnsetMessage(final EO eo, java.lang.String value) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.setValueIfUnset(KEY_MESSAGE, value);
-}
-
-
-}
\ No newline at end of file
diff --git a/lc-ionos/build.gradle b/lc-ionos/build.gradle
deleted file mode 100644
index e125a461971d635e0e68e9fd7b92971707f06517..0000000000000000000000000000000000000000
--- a/lc-ionos/build.gradle
+++ /dev/null
@@ -1,21 +0,0 @@
-plugins {
- id 'java'
-}
-
-group 'leighco'
-version '18.0'
-
-repositories {
- mavenCentral()
-}
-
-dependencies {
- testImplementation 'org.junit.jupiter:junit-jupiter-api:5.8.1'
- testRuntimeOnly 'org.junit.jupiter:junit-jupiter-engine:5.8.1'
- implementation project(':lc-mecha-http-client')
- implementation project(':lc-esp-sdk')
-}
-
-test {
- useJUnitPlatform()
-}
\ No newline at end of file
diff --git a/lc-ionos/src/main/java/lc/ionos/IONOSUpdateService.java b/lc-ionos/src/main/java/lc/ionos/IONOSUpdateService.java
deleted file mode 100644
index 7b9addc7ccc0ec9e61d32b1863c795e360b0948f..0000000000000000000000000000000000000000
--- a/lc-ionos/src/main/java/lc/ionos/IONOSUpdateService.java
+++ /dev/null
@@ -1,160 +0,0 @@
-package lc.ionos;
-
-import lc.esp.sdk.ESPClient;
-import lc.mecha.http.SimpleHttpClient;
-import lc.mecha.json.JSONArray;
-import lc.mecha.json.JSONObject;
-import lc.mecha.log.MechaLogger;
-import lc.mecha.log.MechaLoggerFactory;
-import lc.mecha.util.BasicallyDangerous;
-import lc.mecha.util.UniversalJob;
-import lc.zero.sdk.ZeroClient;
-import lc.zero.sdk.ZeroServiceConfig;
-import org.apache.http.HttpResponse;
-import org.apache.http.client.methods.HttpGet;
-import org.apache.http.client.methods.HttpPut;
-import org.apache.http.entity.StringEntity;
-import org.apache.http.impl.client.CloseableHttpClient;
-import org.apache.http.impl.client.HttpClients;
-import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
-import org.apache.http.util.EntityUtils;
-
-import java.net.URI;
-import java.security.KeyManagementException;
-import java.security.KeyStoreException;
-import java.security.NoSuchAlgorithmException;
-
-import static java.lang.Thread.sleep;
-
-public class IONOSUpdateService extends BasicallyDangerous {
- public static final String ZERO_SERVICE = "ionos";
- private static final MechaLogger logger = MechaLoggerFactory.getLogger(IONOSUpdateService.class);
- private final ESPClient esp = new ESPClient();
- private final SimpleHttpClient httpSimple = new SimpleHttpClient();
- private final CloseableHttpClient httpClient;
- private String est;
- private static final String urlBlock = "https://api.hosting.ionos.com/dns/v1/zones/";
-
- /*
- * This interacts with IONOS API
- * Docs are Here: https://developer.hosting.ionos.com/docs/dns
- */
-
- public IONOSUpdateService() throws KeyStoreException, NoSuchAlgorithmException, KeyManagementException {
- PoolingHttpClientConnectionManager connectionManager = new PoolingHttpClientConnectionManager();
- connectionManager.setMaxTotal(100);
- connectionManager.setDefaultMaxPerRoute(100);
- this.httpClient = HttpClients.custom().setConnectionManager(connectionManager).build();
-
- }
-
- public static void main(String[] args) throws Exception {
-
- IONOSUpdateService update = new IONOSUpdateService();
- update.run();
-
- }
-
- @Override
- public void runDangerously() throws Exception {
-
-
- try (ESPClient esp = new ESPClient()) {
- esp.start();
-
- ZeroClient zero = esp.getZero();
- ZeroServiceConfig cfg = zero.getZai().readConfig(ZERO_SERVICE);
- if (cfg == null) {
- logger.error("No ZERO service configuration provided.");
- System.exit(UniversalJob.RET_BADENV);
- }
- logger.info("Found service configuration: {}", cfg);
-
- // Store loaded ZAI config
- String apiCred = cfg.getCfg().getString("credentials");
- JSONArray domains = cfg.getCfg().getJSONArray("domains");
-
- String previousIP = "";
-
- //noinspection InfiniteLoopStatement
- while (true) {
-
- /* Ideally this should listen for ESP broadcasts instead; however, since that could break
- if the connection to leigh-co is lost, we poll for the current IP ourselves for now.
- */
-
- String currentIP = httpSimple.getJson("https://api.my-ip.io/ip.json").get("ip").toString();
- logger.info("Found IP. {}", currentIP);
-
- // Check if CurrentIP is equal to Previous IP skip if true
- if (!currentIP.equals(previousIP)) {
-
- // Creates JSON object in line with IONOS api
- JSONObject updateRecord = new JSONObject();
- updateRecord.put("disabled", false);
- updateRecord.put("content", currentIP);
- updateRecord.put("ttl", 3600);
- updateRecord.put("prio", 0);
-
- // Generate the GET client
- HttpGet get = new HttpGet(urlBlock);
- get.setHeader("accept", "application/json");
- get.setHeader("X-API-KEY", apiCred);
- get.setHeader("Content-Type", "application/json");
-
- // Generate the PUT client
- HttpPut put = new HttpPut();
- put.setHeader("accept", "application/json");
- put.setHeader("X-API-KEY", apiCred);
- put.setHeader("Content-Type", "application/json");
- put.setEntity(new StringEntity(updateRecord.toString()));
-
- // Read GET response and store in JSONArray
- HttpResponse res = httpClient.execute(get);
- JSONArray allZoneData = new JSONArray(EntityUtils.toString(res.getEntity()));
-
- logger.info("allZoneData: {}", allZoneData);
- logger.info("domains: {}", domains.toString());
-
- // Iterate through AllZoneData and compare domain names to wanted domains in configuration
- for (int i = 0; i < allZoneData.length(); i++) {
- for (int j = 0; j < domains.length(); j++) {
- JSONObject domain = allZoneData.getJSONObject(i);
- String wantedDomain = domains.getString(j);
-
- if (wantedDomain.equals(domain.getString("name"))) {
-
- logger.info("Domain: {}", domain.getString("name"));
-
- // Retrieve Records using Domain IDs for wanted Domains
- get.setURI(URI.create(urlBlock + domain.getString("id")));
- res = httpClient.execute(get);
- JSONArray recordData = new JSONObject(EntityUtils.toString(res.getEntity())).getJSONArray("records");
-
- // Iterate through "A" type records and update with current IP
- for (int k = 0; k < recordData.length(); k++){
- JSONObject record = recordData.getJSONObject(k);
-
- if (record.getString("type").equals("A")){
- logger.info("Updating record:{}, with IP: {}", record.getString("id"), currentIP);
- put.setURI(URI.create(urlBlock + domain.getString("id") + "/records/" + record.getString("id")));
- HttpResponse httpResponse = httpClient.execute(put);
-
- // Make sure to close all HTTPClient Sessions
- EntityUtils.consumeQuietly(httpResponse.getEntity());
- }
- }
- }
- }
- }
-
- // Sleep for 10 minutes
- // This should be removed once ESP issues have been figured out
- logger.info("Sleeping");
- sleep(60 * 10 * 1000);
- }
- }
- }
- }
-}
-
diff --git a/lc-ircbot-svc/build.gradle b/lc-ircbot-svc/build.gradle
deleted file mode 100644
index 0783cab2b0343efa96ecf5de0848ba778938fcc4..0000000000000000000000000000000000000000
--- a/lc-ircbot-svc/build.gradle
+++ /dev/null
@@ -1,46 +0,0 @@
-plugins {
- id 'java'
- id 'application'
- id "com.palantir.docker" version "0.33.0"
-}
-
-group 'leighco'
-version '1.2'
-
-repositories {
- mavenCentral()
-}
-
-application {
- mainClass = 'lc.ircbot.svc.IRCBotService'
-}
-
-dependencies {
- implementation project(':lc-esp-cli-sdk')
- implementation project(':lc-mecha-db-mysql')
- implementation 'org.pircbotx:pircbotx:2.1'
- testImplementation 'org.junit.jupiter:junit-jupiter-api:5.7.0'
- testRuntimeOnly 'org.junit.jupiter:junit-jupiter-engine:5.7.0'
-}
-
-test {
- useJUnitPlatform()
-}
-
-docker {
- name "docker.leigh-co.com/${project.name}:${project.version}"
- files tasks.distTar.outputs
-}
-
-task dockerfile() {
- doLast {
- new File(projectDir, 'Dockerfile').text = """FROM docker.leigh-co.com/openjdk:17-alpine
-RUN mkdir /app
-COPY ${project.name}-${project.version}.tar /dist.tar
-RUN tar xfv /dist.tar
-RUN rm /dist.tar
-ENTRYPOINT ["/${project.name}-${project.version}/bin/${project.name}"]
-"""
- }
-}
-
diff --git a/lc-ircbot-svc/src/main/java/lc/ircbot/svc/BotListener.java b/lc-ircbot-svc/src/main/java/lc/ircbot/svc/BotListener.java
deleted file mode 100644
index 391b15df47f83d3adba24a0789266afd104f2b71..0000000000000000000000000000000000000000
--- a/lc-ircbot-svc/src/main/java/lc/ircbot/svc/BotListener.java
+++ /dev/null
@@ -1,44 +0,0 @@
-package lc.ircbot.svc;
-
-import lc.esp.cli.sdk.CLIClient;
-import lc.esp.cli.sdk.CLIResponse;
-import lc.esp.sdk.ESPAddress;
-import lc.esp.sdk.ESPClient;
-import org.pircbotx.hooks.ListenerAdapter;
-import org.pircbotx.hooks.types.GenericMessageEvent;
-
-import java.util.Arrays;
-
-/**
- * This class implements a {@link ListenerAdapter} which listens for messages sent to the bot, relaying them
- * to ESP.
- *
- * @author Alex Leigh
- * @since 1.2
- */
-public class BotListener extends ListenerAdapter {
- private final CLIClient cli;
-
- public BotListener() throws Exception {
- ESPClient client = new ESPClient();
- client.start();
- cli = new CLIClient(client);
- new Thread(cli).start();
- }
-
- @Override
- public void onGenericMessage(GenericMessageEvent event) {
- String baseMsg = event.getMessage().replaceAll("\\P{Print}", "");
- try {
- if (baseMsg.startsWith("evelyn:")) {
- String msg = baseMsg.substring("evelyn:".length()).trim();
- String[] msgArr = msg.split(" ");
- // FIXME: We must have a timeout here or we will hang/leak this thread
- CLIResponse res = cli.exec(new ESPAddress("queue://lc.global.loa.evelyn.cmd"), Arrays.asList(msgArr));
- event.respond(res.getOut());
- }
- } catch (Exception e) {
- e.printStackTrace();
- }
- }
-}
diff --git a/lc-ircbot-svc/src/main/java/lc/ircbot/svc/IRCBotService.java b/lc-ircbot-svc/src/main/java/lc/ircbot/svc/IRCBotService.java
deleted file mode 100644
index 1e3ffbf363a9ef2901889ecb1e8050b6853a92dd..0000000000000000000000000000000000000000
--- a/lc-ircbot-svc/src/main/java/lc/ircbot/svc/IRCBotService.java
+++ /dev/null
@@ -1,47 +0,0 @@
-package lc.ircbot.svc;
-
-import lc.mecha.log.MechaLogger;
-import lc.mecha.log.MechaLoggerFactory;
-import lc.mecha.util.BasicallyDangerous;
-import org.pircbotx.Configuration;
-import org.pircbotx.PircBotX;
-
-/**
- * Evelyn is a digital assistant in the guise of an IRC bot. It is programmed to inhabit the #leighco
- * channel on the LEIGH&CO IRC network.
- *
- * @author Alex Leigh
- * @since 1.0
- */
-
-//TODO This should really be a generic bot
-
-public class IRCBotService extends BasicallyDangerous {
- private static final MechaLogger logger = MechaLoggerFactory.getLogger(IRCBotService.class);
- private PircBotX bot;
- private final String name = "evelyn_";
-
- public static void main(String[] args) throws Exception {
- new IRCBotService().run();
- }
-
- public IRCBotService() throws Exception {
- // TODO: Configure this from ZERO
- Configuration configuration = new Configuration.Builder()
- .setName(name)
- .addServer("irc0.leigh-co.com")
- .addAutoJoinChannel("#leighco")
- .addListener(new BotListener())
- .setRealName(name)
- .setLogin(name)
- .setWebIrcUsername(name)
- .buildConfiguration();
-
- bot = new PircBotX(configuration);
- }
-
- @Override
- public void runDangerously() throws Exception {
- bot.startBot();
- }
-}
diff --git a/lc-ircbot-svc/src/main/java/lc/ircbot/svc/PastebinThread.java b/lc-ircbot-svc/src/main/java/lc/ircbot/svc/PastebinThread.java
deleted file mode 100644
index 6c30188caf2f1dd83b2efec3cad77c171a047f7e..0000000000000000000000000000000000000000
--- a/lc-ircbot-svc/src/main/java/lc/ircbot/svc/PastebinThread.java
+++ /dev/null
@@ -1,89 +0,0 @@
-package lc.ircbot.svc;
-
-import com.mchange.v2.c3p0.ComboPooledDataSource;
-import lc.mecha.log.MechaLogger;
-import lc.mecha.log.MechaLoggerFactory;
-import org.pircbotx.PircBotX;
-
-import java.beans.PropertyVetoException;
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-
-/**
- * This class implements a {@link Runnable} which monitors pastebin for new pastes. If one is detected
- * then an announcement is sent to the #leighco IRC channel.
- *
- * @author Alex Leigh
- * @since 1.0
- */
-public class PastebinThread implements Runnable {
- private static final MechaLogger logger = MechaLoggerFactory.getLogger(PastebinThread.class);
- private final ComboPooledDataSource cpds;
- private int lastId;
- private PircBotX bot;
-
- public PastebinThread(PircBotX bot) throws PropertyVetoException, SQLException {
- cpds = getDataSource();
- lastId = getPastebinMax();
- logger.info("Last pastebin id: {}", lastId);
- this.bot = bot;
- }
-
- private ComboPooledDataSource getDataSource() throws PropertyVetoException {
- String pw = System.getenv("LC_PASSWORD");
- ComboPooledDataSource cpds = new ComboPooledDataSource();
- cpds.setDriverClass("com.mysql.cj.jdbc.Driver");
- cpds.setJdbcUrl("jdbc:mysql://db1.leigh-co.com/pastebin");
- cpds.setUser("pastebin");
- cpds.setPassword(pw);
- return cpds;
- }
-
- private int getPastebinMax() throws SQLException {
- try (Connection con = cpds.getConnection()) {
- try (PreparedStatement ps = con.prepareStatement("SELECT max(id) FROM files")) {
- try (ResultSet rs = ps.executeQuery()) {
- rs.next();
- return rs.getInt(1);
- }
- }
- }
- }
-
- public void announcePastebin() throws SQLException {
- try (Connection con = cpds.getConnection()) {
- try (PreparedStatement ps = con.prepareStatement("SELECT filename FROM files WHERE id > ?")) {
- ps.setInt(1, lastId);
- try (ResultSet rs = ps.executeQuery()) {
- while (rs.next()) {
- bot.send().message("#leighco", "New pastebin: https://leigh-co.com/pb/obj/" + rs.getString(1));
- }
- }
- }
- }
- }
-
- @Override
- public void run() {
- //noinspection InfiniteLoopStatement
- while (true) {
- try {
- int max = getPastebinMax();
- if (max != lastId) {
- announcePastebin();
- }
- lastId = max;
- } catch (SQLException e) {
- e.printStackTrace();
- }
-
- try {
- Thread.sleep(1000);
- } catch (InterruptedException e) {
- e.printStackTrace();
- }
- }
- }
-}
diff --git a/lc-isles/build.gradle b/lc-isles/build.gradle
index 4015502c229d77d9f951410bda125edd3d517022..7873547165cf2926695eec29f91130b410aa069c 100644
--- a/lc-isles/build.gradle
+++ b/lc-isles/build.gradle
@@ -4,7 +4,7 @@ plugins {
}
group 'leighco'
-version '18.0'
+version '19.0'
repositories {
mavenCentral()
diff --git a/lc-junk/build.gradle b/lc-junk/build.gradle
index 8a5cfbdb533e91572e73648d81fcd5728cf2ba42..49550db7eb970fbcef4f8deb33e2f75efd21e943 100644
--- a/lc-junk/build.gradle
+++ b/lc-junk/build.gradle
@@ -3,7 +3,7 @@ plugins {
}
group 'leighco'
-version '18.0'
+version '19.0'
repositories {
mavenCentral()
diff --git a/lc-mecha/build.gradle b/lc-mecha/build.gradle
index 4d562ec3e2af9918cb8d62a7ff92b5f0fd0013c6..71d48ff5617be9196db02d8f4ffb24d320e6c4c7 100644
--- a/lc-mecha/build.gradle
+++ b/lc-mecha/build.gradle
@@ -5,7 +5,7 @@ plugins {
}
group 'leighco'
-version '18.0-SNAPSHOT'
+version '19.0-SNAPSHOT'
repositories {
mavenCentral()
diff --git a/lc-minecraft-leighco-svc/build.gradle b/lc-minecraft-leighco-svc/build.gradle
deleted file mode 100644
index 6be1cf31783fea3aa74c904dcd31b66fac42a582..0000000000000000000000000000000000000000
--- a/lc-minecraft-leighco-svc/build.gradle
+++ /dev/null
@@ -1,21 +0,0 @@
-plugins {
- id 'java'
-}
-
-group 'leighco'
-version '18.0'
-
-repositories {
- mavenCentral()
-}
-
-dependencies {
- implementation project(':lc-esp-sdk')
- implementation project(':lc-minecraft-link-sdk')
- testImplementation 'org.junit.jupiter:junit-jupiter-api:5.8.1'
- testRuntimeOnly 'org.junit.jupiter:junit-jupiter-engine:5.8.1'
-}
-
-test {
- useJUnitPlatform()
-}
\ No newline at end of file
diff --git a/lc-minecraft-leighco-svc/src/main/java/lc/minecraft/leighco/svc/LeighCoSvc.java b/lc-minecraft-leighco-svc/src/main/java/lc/minecraft/leighco/svc/LeighCoSvc.java
deleted file mode 100644
index fca1f6588ce72c24d1b81ca31553ff9bc0950727..0000000000000000000000000000000000000000
--- a/lc-minecraft-leighco-svc/src/main/java/lc/minecraft/leighco/svc/LeighCoSvc.java
+++ /dev/null
@@ -1,19 +0,0 @@
-package lc.minecraft.leighco.svc;
-
-import lc.mecha.log.MechaLogger;
-import lc.mecha.log.MechaLoggerFactory;
-import lc.mecha.util.UniversalJob;
-
-/**
- * This service implements the core LEIGH&CO processing for the New Eden Minecraft Network (NEMN).
- *
- * @author C. Alexander Leigh
- * @since mk2
- */
-public class LeighCoSvc {
- private static final MechaLogger logger = MechaLoggerFactory.getLogger(LeighCoSvc.class);
-
- public static void main(String[] args) {
- UniversalJob.banner(logger, "LEIGH&CO Processor mk2");
- }
-}
diff --git a/lc-minecraft-link-quests/LICENSE.md b/lc-minecraft-link-quests/LICENSE.md
deleted file mode 100644
index 4ed407067ce34fc61e10f61b36bfea57fb8b8288..0000000000000000000000000000000000000000
--- a/lc-minecraft-link-quests/LICENSE.md
+++ /dev/null
@@ -1,637 +0,0 @@
-# GNU GENERAL PUBLIC LICENSE
-
-Version 3, 29 June 2007
-
-Copyright (C) 2007 [Free Software Foundation, Inc.](http://fsf.org/)
-
-Everyone is permitted to copy and distribute verbatim copies of this license
-document, but changing it is not allowed.
-
-## Preamble
-
-The GNU General Public License is a free, copyleft license for software and
-other kinds of works.
-
-The licenses for most software and other practical works are designed to take
-away your freedom to share and change the works. By contrast, the GNU General
-Public License is intended to guarantee your freedom to share and change all
-versions of a program--to make sure it remains free software for all its users.
-We, the Free Software Foundation, use the GNU General Public License for most
-of our software; it applies also to any other work released this way by its
-authors. You can apply it to your programs, too.
-
-When we speak of free software, we are referring to freedom, not price. Our
-General Public Licenses are designed to make sure that you have the freedom to
-distribute copies of free software (and charge for them if you wish), that you
-receive source code or can get it if you want it, that you can change the
-software or use pieces of it in new free programs, and that you know you can do
-these things.
-
-To protect your rights, we need to prevent others from denying you these rights
-or asking you to surrender the rights. Therefore, you have certain
-responsibilities if you distribute copies of the software, or if you modify it:
-responsibilities to respect the freedom of others.
-
-For example, if you distribute copies of such a program, whether gratis or for
-a fee, you must pass on to the recipients the same freedoms that you received.
-You must make sure that they, too, receive or can get the source code. And you
-must show them these terms so they know their rights.
-
-Developers that use the GNU GPL protect your rights with two steps:
-
-1. assert copyright on the software, and
-2. offer you this License giving you legal permission to copy, distribute
- and/or modify it.
-
-For the developers' and authors' protection, the GPL clearly explains that
-there is no warranty for this free software. For both users' and authors' sake,
-the GPL requires that modified versions be marked as changed, so that their
-problems will not be attributed erroneously to authors of previous versions.
-
-Some devices are designed to deny users access to install or run modified
-versions of the software inside them, although the manufacturer can do so. This
-is fundamentally incompatible with the aim of protecting users' freedom to
-change the software. The systematic pattern of such abuse occurs in the area of
-products for individuals to use, which is precisely where it is most
-unacceptable. Therefore, we have designed this version of the GPL to prohibit
-the practice for those products. If such problems arise substantially in other
-domains, we stand ready to extend this provision to those domains in future
-versions of the GPL, as needed to protect the freedom of users.
-
-Finally, every program is threatened constantly by software patents. States
-should not allow patents to restrict development and use of software on
-general-purpose computers, but in those that do, we wish to avoid the special
-danger that patents applied to a free program could make it effectively
-proprietary. To prevent this, the GPL assures that patents cannot be used to
-render the program non-free.
-
-The precise terms and conditions for copying, distribution and modification
-follow.
-
-## TERMS AND CONDITIONS
-
-### 0. Definitions.
-
-*This License* refers to version 3 of the GNU General Public License.
-
-*Copyright* also means copyright-like laws that apply to other kinds of works,
-such as semiconductor masks.
-
-*The Program* refers to any copyrightable work licensed under this License.
-Each licensee is addressed as *you*. *Licensees* and *recipients* may be
-individuals or organizations.
-
-To *modify* a work means to copy from or adapt all or part of the work in a
-fashion requiring copyright permission, other than the making of an exact copy.
-The resulting work is called a *modified version* of the earlier work or a work
-*based on* the earlier work.
-
-A *covered work* means either the unmodified Program or a work based on the
-Program.
-
-To *propagate* a work means to do anything with it that, without permission,
-would make you directly or secondarily liable for infringement under applicable
-copyright law, except executing it on a computer or modifying a private copy.
-Propagation includes copying, distribution (with or without modification),
-making available to the public, and in some countries other activities as well.
-
-To *convey* a work means any kind of propagation that enables other parties to
-make or receive copies. Mere interaction with a user through a computer
-network, with no transfer of a copy, is not conveying.
-
-An interactive user interface displays *Appropriate Legal Notices* to the
-extent that it includes a convenient and prominently visible feature that
-
-1. displays an appropriate copyright notice, and
-2. tells the user that there is no warranty for the work (except to the
- extent that warranties are provided), that licensees may convey the work
- under this License, and how to view a copy of this License.
-
-If the interface presents a list of user commands or options, such as a menu, a
-prominent item in the list meets this criterion.
-
-### 1. Source Code.
-
-The *source code* for a work means the preferred form of the work for making
-modifications to it. *Object code* means any non-source form of a work.
-
-A *Standard Interface* means an interface that either is an official standard
-defined by a recognized standards body, or, in the case of interfaces specified
-for a particular programming language, one that is widely used among developers
-working in that language.
-
-The *System Libraries* of an executable work include anything, other than the
-work as a whole, that (a) is included in the normal form of packaging a Major
-Component, but which is not part of that Major Component, and (b) serves only
-to enable use of the work with that Major Component, or to implement a Standard
-Interface for which an implementation is available to the public in source code
-form. A *Major Component*, in this context, means a major essential component
-(kernel, window system, and so on) of the specific operating system (if any) on
-which the executable work runs, or a compiler used to produce the work, or an
-object code interpreter used to run it.
-
-The *Corresponding Source* for a work in object code form means all the source
-code needed to generate, install, and (for an executable work) run the object
-code and to modify the work, including scripts to control those activities.
-However, it does not include the work's System Libraries, or general-purpose
-tools or generally available free programs which are used unmodified in
-performing those activities but which are not part of the work. For example,
-Corresponding Source includes interface definition files associated with source
-files for the work, and the source code for shared libraries and dynamically
-linked subprograms that the work is specifically designed to require, such as
-by intimate data communication or control flow between those subprograms and
-other parts of the work.
-
-The Corresponding Source need not include anything that users can regenerate
-automatically from other parts of the Corresponding Source.
-
-The Corresponding Source for a work in source code form is that same work.
-
-### 2. Basic Permissions.
-
-All rights granted under this License are granted for the term of copyright on
-the Program, and are irrevocable provided the stated conditions are met. This
-License explicitly affirms your unlimited permission to run the unmodified
-Program. The output from running a covered work is covered by this License only
-if the output, given its content, constitutes a covered work. This License
-acknowledges your rights of fair use or other equivalent, as provided by
-copyright law.
-
-You may make, run and propagate covered works that you do not convey, without
-conditions so long as your license otherwise remains in force. You may convey
-covered works to others for the sole purpose of having them make modifications
-exclusively for you, or provide you with facilities for running those works,
-provided that you comply with the terms of this License in conveying all
-material for which you do not control copyright. Those thus making or running
-the covered works for you must do so exclusively on your behalf, under your
-direction and control, on terms that prohibit them from making any copies of
-your copyrighted material outside their relationship with you.
-
-Conveying under any other circumstances is permitted solely under the
-conditions stated below. Sublicensing is not allowed; section 10 makes it
-unnecessary.
-
-### 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
-
-No covered work shall be deemed part of an effective technological measure
-under any applicable law fulfilling obligations under article 11 of the WIPO
-copyright treaty adopted on 20 December 1996, or similar laws prohibiting or
-restricting circumvention of such measures.
-
-When you convey a covered work, you waive any legal power to forbid
-circumvention of technological measures to the extent such circumvention is
-effected by exercising rights under this License with respect to the covered
-work, and you disclaim any intention to limit operation or modification of the
-work as a means of enforcing, against the work's users, your or third parties'
-legal rights to forbid circumvention of technological measures.
-
-### 4. Conveying Verbatim Copies.
-
-You may convey verbatim copies of the Program's source code as you receive it,
-in any medium, provided that you conspicuously and appropriately publish on
-each copy an appropriate copyright notice; keep intact all notices stating that
-this License and any non-permissive terms added in accord with section 7 apply
-to the code; keep intact all notices of the absence of any warranty; and give
-all recipients a copy of this License along with the Program.
-
-You may charge any price or no price for each copy that you convey, and you may
-offer support or warranty protection for a fee.
-
-### 5. Conveying Modified Source Versions.
-
-You may convey a work based on the Program, or the modifications to produce it
-from the Program, in the form of source code under the terms of section 4,
-provided that you also meet all of these conditions:
-
-- a) The work must carry prominent notices stating that you modified it, and
- giving a relevant date.
-- b) The work must carry prominent notices stating that it is released under
- this License and any conditions added under section 7. This requirement
- modifies the requirement in section 4 to *keep intact all notices*.
-- c) You must license the entire work, as a whole, under this License to
- anyone who comes into possession of a copy. This License will therefore
- apply, along with any applicable section 7 additional terms, to the whole
- of the work, and all its parts, regardless of how they are packaged. This
- License gives no permission to license the work in any other way, but it
- does not invalidate such permission if you have separately received it.
-- d) If the work has interactive user interfaces, each must display
- Appropriate Legal Notices; however, if the Program has interactive
- interfaces that do not display Appropriate Legal Notices, your work need
- not make them do so.
-
-A compilation of a covered work with other separate and independent works,
-which are not by their nature extensions of the covered work, and which are not
-combined with it such as to form a larger program, in or on a volume of a
-storage or distribution medium, is called an *aggregate* if the compilation and
-its resulting copyright are not used to limit the access or legal rights of the
-compilation's users beyond what the individual works permit. Inclusion of a
-covered work in an aggregate does not cause this License to apply to the other
-parts of the aggregate.
-
-### 6. Conveying Non-Source Forms.
-
-You may convey a covered work in object code form under the terms of sections 4
-and 5, provided that you also convey the machine-readable Corresponding Source
-under the terms of this License, in one of these ways:
-
-- a) Convey the object code in, or embodied in, a physical product (including
- a physical distribution medium), accompanied by the Corresponding Source
- fixed on a durable physical medium customarily used for software
- interchange.
-- b) Convey the object code in, or embodied in, a physical product (including
- a physical distribution medium), accompanied by a written offer, valid for
- at least three years and valid for as long as you offer spare parts or
- customer support for that product model, to give anyone who possesses the
- object code either
- 1. a copy of the Corresponding Source for all the software in the product
- that is covered by this License, on a durable physical medium
- customarily used for software interchange, for a price no more than your
- reasonable cost of physically performing this conveying of source, or
- 2. access to copy the Corresponding Source from a network server at no
- charge.
-- c) Convey individual copies of the object code with a copy of the written
- offer to provide the Corresponding Source. This alternative is allowed only
- occasionally and noncommercially, and only if you received the object code
- with such an offer, in accord with subsection 6b.
-- d) Convey the object code by offering access from a designated place
- (gratis or for a charge), and offer equivalent access to the Corresponding
- Source in the same way through the same place at no further charge. You
- need not require recipients to copy the Corresponding Source along with the
- object code. If the place to copy the object code is a network server, the
- Corresponding Source may be on a different server (operated by you or a
- third party) that supports equivalent copying facilities, provided you
- maintain clear directions next to the object code saying where to find the
- Corresponding Source. Regardless of what server hosts the Corresponding
- Source, you remain obligated to ensure that it is available for as long as
- needed to satisfy these requirements.
-- e) Convey the object code using peer-to-peer transmission, provided you
- inform other peers where the object code and Corresponding Source of the
- work are being offered to the general public at no charge under subsection
- 6d.
-
-A separable portion of the object code, whose source code is excluded from the
-Corresponding Source as a System Library, need not be included in conveying the
-object code work.
-
-A *User Product* is either
-
-1. a *consumer product*, which means any tangible personal property which is
- normally used for personal, family, or household purposes, or
-2. anything designed or sold for incorporation into a dwelling.
-
-In determining whether a product is a consumer product, doubtful cases shall be
-resolved in favor of coverage. For a particular product received by a
-particular user, *normally used* refers to a typical or common use of that
-class of product, regardless of the status of the particular user or of the way
-in which the particular user actually uses, or expects or is expected to use,
-the product. A product is a consumer product regardless of whether the product
-has substantial commercial, industrial or non-consumer uses, unless such uses
-represent the only significant mode of use of the product.
-
-*Installation Information* for a User Product means any methods, procedures,
-authorization keys, or other information required to install and execute
-modified versions of a covered work in that User Product from a modified
-version of its Corresponding Source. The information must suffice to ensure
-that the continued functioning of the modified object code is in no case
-prevented or interfered with solely because modification has been made.
-
-If you convey an object code work under this section in, or with, or
-specifically for use in, a User Product, and the conveying occurs as part of a
-transaction in which the right of possession and use of the User Product is
-transferred to the recipient in perpetuity or for a fixed term (regardless of
-how the transaction is characterized), the Corresponding Source conveyed under
-this section must be accompanied by the Installation Information. But this
-requirement does not apply if neither you nor any third party retains the
-ability to install modified object code on the User Product (for example, the
-work has been installed in ROM).
-
-The requirement to provide Installation Information does not include a
-requirement to continue to provide support service, warranty, or updates for a
-work that has been modified or installed by the recipient, or for the User
-Product in which it has been modified or installed. Access to a network may be
-denied when the modification itself materially and adversely affects the
-operation of the network or violates the rules and protocols for communication
-across the network.
-
-Corresponding Source conveyed, and Installation Information provided, in accord
-with this section must be in a format that is publicly documented (and with an
-implementation available to the public in source code form), and must require
-no special password or key for unpacking, reading or copying.
-
-### 7. Additional Terms.
-
-*Additional permissions* are terms that supplement the terms of this License by
-making exceptions from one or more of its conditions. Additional permissions
-that are applicable to the entire Program shall be treated as though they were
-included in this License, to the extent that they are valid under applicable
-law. If additional permissions apply only to part of the Program, that part may
-be used separately under those permissions, but the entire Program remains
-governed by this License without regard to the additional permissions.
-
-When you convey a copy of a covered work, you may at your option remove any
-additional permissions from that copy, or from any part of it. (Additional
-permissions may be written to require their own removal in certain cases when
-you modify the work.) You may place additional permissions on material, added
-by you to a covered work, for which you have or can give appropriate copyright
-permission.
-
-Notwithstanding any other provision of this License, for material you add to a
-covered work, you may (if authorized by the copyright holders of that material)
-supplement the terms of this License with terms:
-
-- a) Disclaiming warranty or limiting liability differently from the terms of
- sections 15 and 16 of this License; or
-- b) Requiring preservation of specified reasonable legal notices or author
- attributions in that material or in the Appropriate Legal Notices displayed
- by works containing it; or
-- c) Prohibiting misrepresentation of the origin of that material, or
- requiring that modified versions of such material be marked in reasonable
- ways as different from the original version; or
-- d) Limiting the use for publicity purposes of names of licensors or authors
- of the material; or
-- e) Declining to grant rights under trademark law for use of some trade
- names, trademarks, or service marks; or
-- f) Requiring indemnification of licensors and authors of that material by
- anyone who conveys the material (or modified versions of it) with
- contractual assumptions of liability to the recipient, for any liability
- that these contractual assumptions directly impose on those licensors and
- authors.
-
-All other non-permissive additional terms are considered *further restrictions*
-within the meaning of section 10. If the Program as you received it, or any
-part of it, contains a notice stating that it is governed by this License along
-with a term that is a further restriction, you may remove that term. If a
-license document contains a further restriction but permits relicensing or
-conveying under this License, you may add to a covered work material governed
-by the terms of that license document, provided that the further restriction
-does not survive such relicensing or conveying.
-
-If you add terms to a covered work in accord with this section, you must place,
-in the relevant source files, a statement of the additional terms that apply to
-those files, or a notice indicating where to find the applicable terms.
-
-Additional terms, permissive or non-permissive, may be stated in the form of a
-separately written license, or stated as exceptions; the above requirements
-apply either way.
-
-### 8. Termination.
-
-You may not propagate or modify a covered work except as expressly provided
-under this License. Any attempt otherwise to propagate or modify it is void,
-and will automatically terminate your rights under this License (including any
-patent licenses granted under the third paragraph of section 11).
-
-However, if you cease all violation of this License, then your license from a
-particular copyright holder is reinstated
-
-- a) provisionally, unless and until the copyright holder explicitly and
- finally terminates your license, and
-- b) permanently, if the copyright holder fails to notify you of the
- violation by some reasonable means prior to 60 days after the cessation.
-
-Moreover, your license from a particular copyright holder is reinstated
-permanently if the copyright holder notifies you of the violation by some
-reasonable means, this is the first time you have received notice of violation
-of this License (for any work) from that copyright holder, and you cure the
-violation prior to 30 days after your receipt of the notice.
-
-Termination of your rights under this section does not terminate the licenses
-of parties who have received copies or rights from you under this License. If
-your rights have been terminated and not permanently reinstated, you do not
-qualify to receive new licenses for the same material under section 10.
-
-### 9. Acceptance Not Required for Having Copies.
-
-You are not required to accept this License in order to receive or run a copy
-of the Program. Ancillary propagation of a covered work occurring solely as a
-consequence of using peer-to-peer transmission to receive a copy likewise does
-not require acceptance. However, nothing other than this License grants you
-permission to propagate or modify any covered work. These actions infringe
-copyright if you do not accept this License. Therefore, by modifying or
-propagating a covered work, you indicate your acceptance of this License to do
-so.
-
-### 10. Automatic Licensing of Downstream Recipients.
-
-Each time you convey a covered work, the recipient automatically receives a
-license from the original licensors, to run, modify and propagate that work,
-subject to this License. You are not responsible for enforcing compliance by
-third parties with this License.
-
-An *entity transaction* is a transaction transferring control of an
-organization, or substantially all assets of one, or subdividing an
-organization, or merging organizations. If propagation of a covered work
-results from an entity transaction, each party to that transaction who receives
-a copy of the work also receives whatever licenses to the work the party's
-predecessor in interest had or could give under the previous paragraph, plus a
-right to possession of the Corresponding Source of the work from the
-predecessor in interest, if the predecessor has it or can get it with
-reasonable efforts.
-
-You may not impose any further restrictions on the exercise of the rights
-granted or affirmed under this License. For example, you may not impose a
-license fee, royalty, or other charge for exercise of rights granted under this
-License, and you may not initiate litigation (including a cross-claim or
-counterclaim in a lawsuit) alleging that any patent claim is infringed by
-making, using, selling, offering for sale, or importing the Program or any
-portion of it.
-
-### 11. Patents.
-
-A *contributor* is a copyright holder who authorizes use under this License of
-the Program or a work on which the Program is based. The work thus licensed is
-called the contributor's *contributor version*.
-
-A contributor's *essential patent claims* are all patent claims owned or
-controlled by the contributor, whether already acquired or hereafter acquired,
-that would be infringed by some manner, permitted by this License, of making,
-using, or selling its contributor version, but do not include claims that would
-be infringed only as a consequence of further modification of the contributor
-version. For purposes of this definition, *control* includes the right to grant
-patent sublicenses in a manner consistent with the requirements of this
-License.
-
-Each contributor grants you a non-exclusive, worldwide, royalty-free patent
-license under the contributor's essential patent claims, to make, use, sell,
-offer for sale, import and otherwise run, modify and propagate the contents of
-its contributor version.
-
-In the following three paragraphs, a *patent license* is any express agreement
-or commitment, however denominated, not to enforce a patent (such as an express
-permission to practice a patent or covenant not to sue for patent
-infringement). To *grant* such a patent license to a party means to make such
-an agreement or commitment not to enforce a patent against the party.
-
-If you convey a covered work, knowingly relying on a patent license, and the
-Corresponding Source of the work is not available for anyone to copy, free of
-charge and under the terms of this License, through a publicly available
-network server or other readily accessible means, then you must either
-
-1. cause the Corresponding Source to be so available, or
-2. arrange to deprive yourself of the benefit of the patent license for this
- particular work, or
-3. arrange, in a manner consistent with the requirements of this License, to
- extend the patent license to downstream recipients.
-
-*Knowingly relying* means you have actual knowledge that, but for the patent
-license, your conveying the covered work in a country, or your recipient's use
-of the covered work in a country, would infringe one or more identifiable
-patents in that country that you have reason to believe are valid.
-
-If, pursuant to or in connection with a single transaction or arrangement, you
-convey, or propagate by procuring conveyance of, a covered work, and grant a
-patent license to some of the parties receiving the covered work authorizing
-them to use, propagate, modify or convey a specific copy of the covered work,
-then the patent license you grant is automatically extended to all recipients
-of the covered work and works based on it.
-
-A patent license is *discriminatory* if it does not include within the scope of
-its coverage, prohibits the exercise of, or is conditioned on the non-exercise
-of one or more of the rights that are specifically granted under this License.
-You may not convey a covered work if you are a party to an arrangement with a
-third party that is in the business of distributing software, under which you
-make payment to the third party based on the extent of your activity of
-conveying the work, and under which the third party grants, to any of the
-parties who would receive the covered work from you, a discriminatory patent
-license
-
-- a) in connection with copies of the covered work conveyed by you (or copies
- made from those copies), or
-- b) primarily for and in connection with specific products or compilations
- that contain the covered work, unless you entered into that arrangement, or
- that patent license was granted, prior to 28 March 2007.
-
-Nothing in this License shall be construed as excluding or limiting any implied
-license or other defenses to infringement that may otherwise be available to
-you under applicable patent law.
-
-### 12. No Surrender of Others' Freedom.
-
-If conditions are imposed on you (whether by court order, agreement or
-otherwise) that contradict the conditions of this License, they do not excuse
-you from the conditions of this License. If you cannot convey a covered work so
-as to satisfy simultaneously your obligations under this License and any other
-pertinent obligations, then as a consequence you may not convey it at all. For
-example, if you agree to terms that obligate you to collect a royalty for
-further conveying from those to whom you convey the Program, the only way you
-could satisfy both those terms and this License would be to refrain entirely
-from conveying the Program.
-
-### 13. Use with the GNU Affero General Public License.
-
-Notwithstanding any other provision of this License, you have permission to
-link or combine any covered work with a work licensed under version 3 of the
-GNU Affero General Public License into a single combined work, and to convey
-the resulting work. The terms of this License will continue to apply to the
-part which is the covered work, but the special requirements of the GNU Affero
-General Public License, section 13, concerning interaction through a network
-will apply to the combination as such.
-
-### 14. Revised Versions of this License.
-
-The Free Software Foundation may publish revised and/or new versions of the GNU
-General Public License from time to time. Such new versions will be similar in
-spirit to the present version, but may differ in detail to address new problems
-or concerns.
-
-Each version is given a distinguishing version number. If the Program specifies
-that a certain numbered version of the GNU General Public License *or any later
-version* applies to it, you have the option of following the terms and
-conditions either of that numbered version or of any later version published by
-the Free Software Foundation. If the Program does not specify a version number
-of the GNU General Public License, you may choose any version ever published by
-the Free Software Foundation.
-
-If the Program specifies that a proxy can decide which future versions of the
-GNU General Public License can be used, that proxy's public statement of
-acceptance of a version permanently authorizes you to choose that version for
-the Program.
-
-Later license versions may give you additional or different permissions.
-However, no additional obligations are imposed on any author or copyright
-holder as a result of your choosing to follow a later version.
-
-### 15. Disclaimer of Warranty.
-
-THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE
-LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER
-PARTIES PROVIDE THE PROGRAM *AS IS* WITHOUT WARRANTY OF ANY KIND, EITHER
-EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE
-QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE
-DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR
-CORRECTION.
-
-### 16. Limitation of Liability.
-
-IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY
-COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS
-PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL,
-INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE
-THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED
-INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE
-PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY
-HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
-
-### 17. Interpretation of Sections 15 and 16.
-
-If the disclaimer of warranty and limitation of liability provided above cannot
-be given local legal effect according to their terms, reviewing courts shall
-apply local law that most closely approximates an absolute waiver of all civil
-liability in connection with the Program, unless a warranty or assumption of
-liability accompanies a copy of the Program in return for a fee.
-
-## END OF TERMS AND CONDITIONS
-
-### How to Apply These Terms to Your New Programs
-
-If you develop a new program, and you want it to be of the greatest possible
-use to the public, the best way to achieve this is to make it free software
-which everyone can redistribute and change under these terms.
-
-To do so, attach the following notices to the program. It is safest to attach
-them to the start of each source file to most effectively state the exclusion
-of warranty; and each file should have at least the *copyright* line and a
-pointer to where the full notice is found.
-
-
- Copyright (C) <year> <name of author>
-
- This program is free software: you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation, either version 3 of the License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License
- along with this program. If not, see <http://www.gnu.org/licenses/>.
-
-Also add information on how to contact you by electronic and paper mail.
-
-If the program does terminal interaction, make it output a short notice like
-this when it starts in an interactive mode:
-
- <program> Copyright (C) <year> <name of author>
- This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
- This is free software, and you are welcome to redistribute it
- under certain conditions; type `show c' for details.
-
-The hypothetical commands `show w` and `show c` should show the appropriate
-parts of the General Public License. Of course, your program's commands might
-be different; for a GUI interface, you would use an *about box*.
-
-You should also get your employer (if you work as a programmer) or school, if
-any, to sign a *copyright disclaimer* for the program, if necessary. For more
-information on this, and how to apply and follow the GNU GPL, see
-[http://www.gnu.org/licenses/](http://www.gnu.org/licenses/).
-
-The GNU General Public License does not permit incorporating your program into
-proprietary programs. If your program is a subroutine library, you may consider
-it more useful to permit linking proprietary applications with the library. If
-this is what you want to do, use the GNU Lesser General Public License instead
-of this License. But first, please read
-[http://www.gnu.org/philosophy/why-not-lgpl.html](http://www.gnu.org/philosophy/why-not-lgpl.html).
diff --git a/lc-minecraft-link-quests/build.gradle b/lc-minecraft-link-quests/build.gradle
deleted file mode 100644
index 46aead21d4926632dd5b2ba802a13ea7c86017f1..0000000000000000000000000000000000000000
--- a/lc-minecraft-link-quests/build.gradle
+++ /dev/null
@@ -1,38 +0,0 @@
-plugins {
- id 'java'
- id 'java-library'
-}
-
-group 'leigh'
-version '16.0'
-sourceCompatibility = JavaVersion.VERSION_17
-targetCompatibility = JavaVersion.VERSION_17
-repositories {
- mavenCentral()
- mavenLocal()
- maven { url 'https://jitpack.io' }
- // Bukkit
- maven {
- url = 'https://hub.spigotmc.org/nexus/content/repositories/snapshots/'
- content {
- includeGroup 'org.bukkit'
- includeGroup 'org.spigotmc'
- }
- }
- // Quests
- maven { url 'https://jitpack.io' }
-}
-
-
-dependencies {
- testImplementation 'org.junit.jupiter:junit-jupiter-api:5.7.0'
- testRuntimeOnly 'org.junit.jupiter:junit-jupiter-engine:5.7.0'
- compileOnly 'com.github.PikaMug:Quests:4.1.3'
- compileOnly 'org.bukkit:bukkit:1.15.2-R0.1-SNAPSHOT'
- implementation project(':lc-minecraft-link')
- implementation project(':lc-mecha')
-}
-
-test {
- useJUnitPlatform()
-}
diff --git a/lc-minecraft-link-quests/src/main/java/leigh/minecraft/link/quests/ReputationReward.java b/lc-minecraft-link-quests/src/main/java/leigh/minecraft/link/quests/ReputationReward.java
deleted file mode 100644
index 1730e2d6fd00fbbef89c0b9c0281510aa7da7198..0000000000000000000000000000000000000000
--- a/lc-minecraft-link-quests/src/main/java/leigh/minecraft/link/quests/ReputationReward.java
+++ /dev/null
@@ -1,45 +0,0 @@
-package leigh.minecraft.link.quests;
-
-import lc.mecha.log.MechaLogger;
-import lc.mecha.log.MechaLoggerFactory;
-import lc.minecraft.link.LinkPlugin;
-import lc.minecraft.link.faction.Reputation;
-import me.blackvein.quests.CustomReward;
-import org.bukkit.Bukkit;
-import org.bukkit.entity.Player;
-
-import java.util.Map;
-
-/**
- * Custom reward handler for Quests which rewards the player with a reputation adjustment.
- *
- * @author Alex Leigh
- */
-public class ReputationReward extends CustomReward {
- private static final MechaLogger logger = MechaLoggerFactory.getLogger(ReputationReward.class);
- private final static String KEY_FACTION_ID = "factionId";
- private final static String KEY_REPUTATION = "reputation";
- private final Reputation reputation;
-
- public ReputationReward() {
- this.setName("LCReputationReward");
- this.setAuthor("LEIGH&CO");
- this.setDisplay("Reputation");
- this.setItem("CHEST", (short) 0);
- this.addStringPrompt(KEY_FACTION_ID, "Faction ID", null);
- this.addStringPrompt(KEY_REPUTATION, "Reputation Delta", null);
-
- LinkPlugin link =
- (LinkPlugin) Bukkit.getPluginManager().getPlugin("lc-minecraft-link");
- assert link != null;
-
- reputation = link.getReputation();
- }
-
- @Override
- public void giveReward(Player player, Map map) {
- int factionId = Integer.parseInt((String) map.get(KEY_FACTION_ID));
- int delta = Integer.parseInt((String) map.get(KEY_REPUTATION));
- reputation.adjustReputation(player.getUniqueId(), factionId, delta);
- }
-}
diff --git a/lc-minecraft-link-quests/src/main/java/leigh/minecraft/link/quests/SwordReward.java b/lc-minecraft-link-quests/src/main/java/leigh/minecraft/link/quests/SwordReward.java
deleted file mode 100644
index a4ec45743af53677b3d33c1908f4d0e06b8d64f3..0000000000000000000000000000000000000000
--- a/lc-minecraft-link-quests/src/main/java/leigh/minecraft/link/quests/SwordReward.java
+++ /dev/null
@@ -1,96 +0,0 @@
-package leigh.minecraft.link.quests;
-
-import me.blackvein.quests.CustomReward;
-import org.bukkit.Bukkit;
-import org.bukkit.Material;
-import org.bukkit.attribute.Attribute;
-import org.bukkit.attribute.AttributeModifier;
-import org.bukkit.enchantments.Enchantment;
-import org.bukkit.entity.Player;
-import org.bukkit.inventory.EquipmentSlot;
-import org.bukkit.inventory.Inventory;
-import org.bukkit.inventory.ItemStack;
-import org.bukkit.inventory.meta.ItemMeta;
-
-import java.util.Arrays;
-import java.util.Map;
-import java.util.UUID;
-
-/**
- * Custom reward for Quests which rewards the player with a LEIGH&CO prototype sword.
- *
- * @author Alex Leigh
- */
-public class SwordReward extends CustomReward {
- // Note that changing these may break previously-created quests. See quests.yml
- private final static String KEY_ATTACK_SPEED = "attackSpeed";
- private final static String KEY_ATTACK_POWER = "attackPower";
- private final static String KEY_LORE = "lore";
- private final static String KEY_ENCHANT_DMG_ALL = "damageAll";
- private final static String KEY_DISPLAY_NAME = "displayName";
-
- public SwordReward() {
- // This is the name the quest editor will use
- this.setName("LCSwordReward");
- this.setAuthor("LEIGH&CO");
- this.setDisplay("Prototype Sword");
- this.setItem("CHEST", (short) 0);
- this.addStringPrompt(KEY_DISPLAY_NAME, "Display Name", null);
- this.addStringPrompt(KEY_LORE, "Lore Text", null);
- this.addStringPrompt(KEY_ATTACK_POWER, "Attack power of the sword.", null);
- this.addStringPrompt(KEY_ATTACK_SPEED, "Attack Speed.", null);
- this.addStringPrompt(KEY_ENCHANT_DMG_ALL, "DMG Enchant Level (0-3)", "0");
- }
-
- @Override
- public void giveReward(Player player, Map map) {
- // Multiple of 9, between 9 and 54
- Inventory inv = Bukkit.getServer().createInventory(player, 9, "Reward");
- ItemStack sword = new ItemStack(Material.IRON_SWORD, 1);
- ItemMeta meta = sword.getItemMeta();
-
- String lore = (String) map.get(KEY_LORE);
- if (lore != null) {
- meta.setLore(Arrays.asList(lore));
- }
-
- String name = (String) map.get(KEY_DISPLAY_NAME);
- if (name != null) {
- meta.setDisplayName(name);
- }
-
- if (lore == null) {
- meta.setLore(Arrays.asList("A LEIGH&CO factory prototype."));
- }
-
- try {
- int power = Integer.parseInt((String) map.get(KEY_ATTACK_POWER));
- AttributeModifier damageModifier = new AttributeModifier(UUID.randomUUID(),
- "Technica", power, AttributeModifier.Operation.ADD_NUMBER, EquipmentSlot.HAND);
- meta.addAttributeModifier(Attribute.GENERIC_ATTACK_DAMAGE, damageModifier);
- } catch (Exception e) {
- e.printStackTrace();
- }
-
- try {
- int speed = Integer.parseInt((String) map.get(KEY_ATTACK_SPEED));
- AttributeModifier speedModifier = new AttributeModifier(UUID.randomUUID(),
- "Technica", speed, AttributeModifier.Operation.ADD_NUMBER, EquipmentSlot.HAND);
- meta.addAttributeModifier(Attribute.GENERIC_ATTACK_SPEED, speedModifier);
- } catch (Exception e) {
- e.printStackTrace();
- }
-
- try {
- int dmgLevel = Integer.parseInt((String) map.get(KEY_ENCHANT_DMG_ALL));
- // We prompted for 0-3 but will actually take more.
- if (dmgLevel > 0) {
- meta.addEnchant(Enchantment.DAMAGE_ALL, dmgLevel, true);
- }
- } catch (Exception e) {
- e.printStackTrace();
- }
-
- sword.setItemMeta(meta);
- inv.setItem(0, sword);
- player.openInventory(inv);
- }
-}
diff --git a/lc-minecraft-link-quests/src/main/resources/plugin.yml b/lc-minecraft-link-quests/src/main/resources/plugin.yml
deleted file mode 100644
index f0f788f501db9fb78f52daa0bfd33c4063347b7c..0000000000000000000000000000000000000000
--- a/lc-minecraft-link-quests/src/main/resources/plugin.yml
+++ /dev/null
@@ -1,4 +0,0 @@
-name: LEIGHCOQuests
-version: 1.0
-description: Quests module for LEIGH&CO
-depend: [ Quests, MinecraftLink ]
\ No newline at end of file
diff --git a/lc-minecraft-link-sdk/build.gradle b/lc-minecraft-link-sdk/build.gradle
deleted file mode 100644
index dc4deda00fcfce9fdd5721f15325042f9bc460a9..0000000000000000000000000000000000000000
--- a/lc-minecraft-link-sdk/build.gradle
+++ /dev/null
@@ -1,24 +0,0 @@
-plugins {
- id 'java'
-}
-
-group 'leighco'
-version '2.0'
-
-sourceCompatibility = JavaVersion.VERSION_17
-targetCompatibility = JavaVersion.VERSION_17
-
-repositories {
- mavenCentral()
-}
-
-dependencies {
- implementation project(':lc-esp-sdk')
- implementation project(':lc-eo-schema')
- testImplementation 'org.junit.jupiter:junit-jupiter-api:5.8.1'
- testRuntimeOnly 'org.junit.jupiter:junit-jupiter-engine:5.8.1'
-}
-
-test {
- useJUnitPlatform()
-}
\ No newline at end of file
diff --git a/lc-minecraft-link-sdk/src/main/java/lc/minecraft/link/sdk/LinkAddresses.java b/lc-minecraft-link-sdk/src/main/java/lc/minecraft/link/sdk/LinkAddresses.java
deleted file mode 100644
index 980b7b3d45135ea32453fdcc4f5c096536e6ff96..0000000000000000000000000000000000000000
--- a/lc-minecraft-link-sdk/src/main/java/lc/minecraft/link/sdk/LinkAddresses.java
+++ /dev/null
@@ -1,18 +0,0 @@
-package lc.minecraft.link.sdk;
-
-import lc.esp.sdk.ESPAddress;
-import lc.esp.sdk.ESPAddressClass;
-import lc.esp.sdk.ESPMessageClass;
-
-public class LinkAddresses {
- private final static ESPAddress telemetryAddress = new ESPAddress("leighco", "prod",
- "minecraft", "neweden", ESPAddressClass.TOPIC,
- ESPMessageClass.TELEMETRY);
-
- /**
- * Return the telemetry address. All telemetry from the world servers is sent to this address.
- */
- public static ESPAddress getTelemetryAddress() {
- return telemetryAddress;
- }
-}
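
For context on how this address is consumed, the sketch below condenses the producer setup from the `BukkitEventHandler` constructor and the message construction from its `onBlockBreak` handler, both of which appear later in this diff. The `AsyncSender` import path, the literal symbol keys and values, and the class wrapper are illustrative assumptions, not part of the deleted sources.

    // Illustrative sketch only: condensed from BukkitEventHandler further down in this diff.
    import lc.esp.sdk.ESPClient;
    import lc.esp.sdk.ESPMessage;
    import lc.esp.sdk.ESPProducer;
    import lc.esp.sdk.ESPSession;
    import lc.esp.sdk.telemetry.TelemetryFrame;
    import lc.esp.sdk.telemetry.TelemetrySymbol;
    import lc.minecraft.link.AsyncSender; // assumption: AsyncSender lives in lc.minecraft.link, where it is used unqualified
    import lc.minecraft.link.sdk.LinkAddresses;

    import java.time.Instant;

    public class TelemetryPublishSketch {
        public static void main(String[] args) throws Exception {
            // Connect to ESP and open a producer on the shared telemetry topic.
            ESPClient esp = new ESPClient();
            esp.start();
            ESPSession session = esp.createSession();
            ESPProducer producer = session.createProducer(LinkAddresses.getTelemetryAddress());

            // Messages are handed off to an AsyncSender thread, exactly as the plugin does.
            AsyncSender sender = new AsyncSender(producer);
            new Thread(sender).start();

            // Build a one-symbol telemetry frame (mirrors addValue(...) in BukkitEventHandler).
            TelemetryFrame frame = new TelemetryFrame(Instant.now());
            frame.addSymbol(new TelemetrySymbol("lc.mc.server", "neweden-1", true)); // placeholder server name

            // Wrap the frame in an ESP message and queue it for sending.
            ESPMessage msg = new ESPMessage();
            msg.setParameter("lc.mc.evt.typ", "lc.mc.evt.blk_break");
            msg.setPayload(frame.toJson());
            sender.send(msg);
        }
    }
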
diff --git a/lc-minecraft-link-sdk/src/main/java/lc/minecraft/link/sdk/SchemaGenerator.java b/lc-minecraft-link-sdk/src/main/java/lc/minecraft/link/sdk/SchemaGenerator.java
deleted file mode 100644
index d5882d6e620039345b1a884b0bf768be3bc42c73..0000000000000000000000000000000000000000
--- a/lc-minecraft-link-sdk/src/main/java/lc/minecraft/link/sdk/SchemaGenerator.java
+++ /dev/null
@@ -1,43 +0,0 @@
-package lc.minecraft.link.sdk;
-
-import lc.eo.EO;
-import lc.eo.EODataType;
-import lc.eo.schema.DAOGenerator;
-import lc.eo.schema.ElementElementDAO;
-import lc.eo.schema.SchemaElementDAO;
-import lc.eo.schema.util.AttributeUtil;
-import lc.mecha.log.MechaLogger;
-import lc.mecha.log.MechaLoggerFactory;
-import lc.mecha.util.UniversalJob;
-
-import java.io.File;
-import java.io.IOException;
-
-public class SchemaGenerator {
- private static final MechaLogger logger = MechaLoggerFactory.getLogger(SchemaGenerator.class);
-
- // Observe the procedures of a general alert
-
- public static EO generate() {
- final EO schema = lc.eo.schema.util.SchemaUtil.create("hello.schema");
-
- final EO entityDeathEvent = ElementElementDAO.create();
- SchemaElementDAO.getElements(schema).add(entityDeathEvent);
- ElementElementDAO.setEoType(entityDeathEvent, "lc.minecraft.link.v2.entity_death_event");
- ElementElementDAO.getAttributes(entityDeathEvent).add(AttributeUtil.create("entity_name", "Entity Name"));
- ElementElementDAO.getAttributes(entityDeathEvent).add(AttributeUtil.create("killer_uuid", "Killer UUID", EODataType.uuid));
- ElementElementDAO.getAttributes(entityDeathEvent).add(AttributeUtil.create("killer_name", "Killer Name"));
- ElementElementDAO.getAttributes(entityDeathEvent).add(AttributeUtil.create("server_name", "Server Name"));
- ElementElementDAO.getAttributes(entityDeathEvent).add(AttributeUtil.create("world_name", "World Name"));
-
- return schema;
- }
-
- public static void main(final String... args) throws IOException {
- UniversalJob.banner(logger, "SchemaGenerator mk3", "2014-2022 Alex Leigh");
- final EO schema = generate();
- final DAOGenerator gen = new DAOGenerator(new File("lc-minecraft-link-sdk/src/main/java"));
- gen.generateJava("lc.minecraft.link.sdk.schema.v2", schema);
- logger.info("Generated schema: {}", schema);
- }
-}
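
The DAO emitted by the run above is shown in the next file. As a quick orientation, here is a minimal sketch of the generate-then-use round trip; it calls only methods that appear in `Entity_death_eventElementDAO` below, and the entity, player, server, and world values are placeholders.

    import lc.eo.EO;
    import lc.minecraft.link.sdk.schema.v2.Entity_death_eventElementDAO;

    import java.util.UUID;

    public class EntityDeathEventSketch {
        public static EO build() {
            // Create an EO of type lc.minecraft.link.v2.entity_death_event and populate it
            // through the generated accessors (each setter validates the EO type first).
            EO event = Entity_death_eventElementDAO.create();
            Entity_death_eventElementDAO.setEntity_name(event, "Zombie");          // placeholder
            Entity_death_eventElementDAO.setKiller_name(event, "SomePlayer");      // placeholder
            Entity_death_eventElementDAO.setKiller_uuid(event, UUID.randomUUID()); // placeholder
            Entity_death_eventElementDAO.setServer_name(event, "neweden-1");       // placeholder
            Entity_death_eventElementDAO.setWorld_name(event, "world");            // placeholder
            return event;
        }

        public static void main(String[] args) {
            // Read back one attribute through the generated getter.
            System.out.println(Entity_death_eventElementDAO.getEntity_name(build()));
        }
    }
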
diff --git a/lc-minecraft-link-sdk/src/main/java/lc/minecraft/link/sdk/schema/v2/Entity_death_eventElementDAO.java b/lc-minecraft-link-sdk/src/main/java/lc/minecraft/link/sdk/schema/v2/Entity_death_eventElementDAO.java
deleted file mode 100644
index 40eb13d166318431f82883590973c55aa78df68f..0000000000000000000000000000000000000000
--- a/lc-minecraft-link-sdk/src/main/java/lc/minecraft/link/sdk/schema/v2/Entity_death_eventElementDAO.java
+++ /dev/null
@@ -1,213 +0,0 @@
-/*
- * Copyright (c) 2014-2022, by Alex Leigh.
- * All rights reserved.
- *
- * THIS IS UNPUBLISHED PROPRIETARY SOURCE CODE
- * The copyright notice above does not evidence any
- * actual or intended publication of such source code.
- */
-
-package lc.minecraft.link.sdk.schema.v2;
-
-import lc.eo.EO;
-
-/**
- * This is a dynamically generated DAO class for accessing objects within an ESP graph. This class
- * has been generated by DAOGenerator and should not be modified.
- *
- * @author DAOGenerator (synthetically generated class)
- */
-public final class Entity_death_eventElementDAO {
- public static final String API_TYPE = "lc.minecraft.link.v2.entity_death_event";
-
- public static EO create() {
- EO eo = new EO(API_TYPE);
- return eo;
- }
-
- public static boolean assertType(final EO eo) {
- return eo.getType().equals(API_TYPE);
- }
-
- public static final String KEY_ENTITY_NAME = "entity_name";
- public static final String KEY_KILLER_UUID = "killer_uuid";
- public static final String KEY_KILLER_NAME = "killer_name";
- public static final String KEY_SERVER_NAME = "server_name";
- public static final String KEY_WORLD_NAME = "world_name";
-
- public static String apiType(final EO eo) {
- return eo.getType();
- }
-
-
- /* EO{typeName='lc.eo.schema.Attribute', data={eoType=entity_name, label=EOLoop{array=[EO{typeName='lc.eo.schema.Text', data={text=Entity Name, locale=en}, meta=null}]} , type=string}, meta=null} */
-
-
- public static java.lang.String getEntity_name(EO eo) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- return eo.getValueString(KEY_ENTITY_NAME);
- }
-
- public static void setEntity_name(final EO eo, java.lang.String value) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.setValue(KEY_ENTITY_NAME, value);
- }
-
- public static void setEntity_nameLabel(final EO eo, final String label) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.setLabel(KEY_ENTITY_NAME, label);
- }
-
- public static void getLabelOrValueEntity_name(final EO eo, java.lang.String value) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.getLabelOrValue(KEY_ENTITY_NAME);
- }
-
- public static void setIfUnsetEntity_name(final EO eo, java.lang.String value) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.setValueIfUnset(KEY_ENTITY_NAME, value);
- }
-
-
- /* EO{typeName='lc.eo.schema.Attribute', data={eoType=killer_uuid, label=EOLoop{array=[EO{typeName='lc.eo.schema.Text', data={text=Killer UUID, locale=en}, meta=null}]} , type=uuid}, meta=null} */
-
-
- public static java.util.UUID getKiller_uuid(EO eo) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- return eo.getValueUUID(KEY_KILLER_UUID);
- }
-
- public static void setKiller_uuid(final EO eo, java.util.UUID value) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.setValue(KEY_KILLER_UUID, value);
- }
-
- public static void setKiller_uuidLabel(final EO eo, final String label) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.setLabel(KEY_KILLER_UUID, label);
- }
-
- public static void getLabelOrValueKiller_uuid(final EO eo, java.util.UUID value) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.getLabelOrValue(KEY_KILLER_UUID);
- }
-
- public static void setIfUnsetKiller_uuid(final EO eo, java.util.UUID value) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.setValueIfUnset(KEY_KILLER_UUID, value);
- }
-
-
- /* EO{typeName='lc.eo.schema.Attribute', data={eoType=killer_name, label=EOLoop{array=[EO{typeName='lc.eo.schema.Text', data={text=Killer Name, locale=en}, meta=null}]} , type=string}, meta=null} */
-
-
- public static java.lang.String getKiller_name(EO eo) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- return eo.getValueString(KEY_KILLER_NAME);
- }
-
- public static void setKiller_name(final EO eo, java.lang.String value) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.setValue(KEY_KILLER_NAME, value);
- }
-
- public static void setKiller_nameLabel(final EO eo, final String label) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.setLabel(KEY_KILLER_NAME, label);
- }
-
- public static void getLabelOrValueKiller_name(final EO eo, java.lang.String value) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.getLabelOrValue(KEY_KILLER_NAME);
- }
-
- public static void setIfUnsetKiller_name(final EO eo, java.lang.String value) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.setValueIfUnset(KEY_KILLER_NAME, value);
- }
-
-
- /* EO{typeName='lc.eo.schema.Attribute', data={eoType=server_name, label=EOLoop{array=[EO{typeName='lc.eo.schema.Text', data={text=Server Name, locale=en}, meta=null}]} , type=string}, meta=null} */
-
-
- public static java.lang.String getServer_name(EO eo) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- return eo.getValueString(KEY_SERVER_NAME);
- }
-
- public static void setServer_name(final EO eo, java.lang.String value) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.setValue(KEY_SERVER_NAME, value);
- }
-
- public static void setServer_nameLabel(final EO eo, final String label) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.setLabel(KEY_SERVER_NAME, label);
- }
-
- public static void getLabelOrValueServer_name(final EO eo, java.lang.String value) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.getLabelOrValue(KEY_SERVER_NAME);
- }
-
- public static void setIfUnsetServer_name(final EO eo, java.lang.String value) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.setValueIfUnset(KEY_SERVER_NAME, value);
- }
-
-
- /* EO{typeName='lc.eo.schema.Attribute', data={eoType=world_name, label=EOLoop{array=[EO{typeName='lc.eo.schema.Text', data={text=World Name, locale=en}, meta=null}]} , type=string}, meta=null} */
-
-
- public static java.lang.String getWorld_name(EO eo) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- return eo.getValueString(KEY_WORLD_NAME);
- }
-
- public static void setWorld_name(final EO eo, java.lang.String value) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.setValue(KEY_WORLD_NAME, value);
- }
-
- public static void setWorld_nameLabel(final EO eo, final String label) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.setLabel(KEY_WORLD_NAME, label);
- }
-
- public static void getLabelOrValueWorld_name(final EO eo, java.lang.String value) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.getLabelOrValue(KEY_WORLD_NAME);
- }
-
- public static void setIfUnsetWorld_name(final EO eo, java.lang.String value) {
- if (!assertType(eo))
- throw new IllegalArgumentException("Mismatched EO type: [found: " + eo.getType() + "] [expected: " + API_TYPE + "]");
- eo.setValueIfUnset(KEY_WORLD_NAME, value);
-}
-
-
-}
\ No newline at end of file
diff --git a/lc-minecraft-link/.gitignore b/lc-minecraft-link/.gitignore
deleted file mode 100644
index ede7310f5aa22a8bb16f7393dedac5d9b0cba2c9..0000000000000000000000000000000000000000
--- a/lc-minecraft-link/.gitignore
+++ /dev/null
@@ -1,25 +0,0 @@
-kabe.log
-velocity.log
-.DS_Store
-statistic.xml
-.gradle
-/*/build/
-/build/
-gradle-app.setting
-!gradle-wrapper.jar
-.gradletasknamecache
-.idea/
-*.iml
-out/
-.cache
-node_modules
-/*/.idea
-.vi
-/*/graph.eo
-.vai
-graph.bak
-.idea
-tests.vai
-vds.log
-hosts.json
-tests.vai.old
\ No newline at end of file
diff --git a/lc-minecraft-link/LICENSE.md b/lc-minecraft-link/LICENSE.md
deleted file mode 100644
index 45a6272868993549245a703bf2a3445014e32577..0000000000000000000000000000000000000000
--- a/lc-minecraft-link/LICENSE.md
+++ /dev/null
@@ -1,492 +0,0 @@
-# GNU GENERAL PUBLIC LICENSE
-
-Version 3, 29 June 2007
-
-Copyright (C) 2007 [Free Software Foundation, Inc.](http://fsf.org/)
-
-Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed.
-
-## Preamble
-
-The GNU General Public License is a free, copyleft license for software and other kinds of works.
-
-The licenses for most software and other practical works are designed to take away your freedom to share and change the
-works. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change all
-versions of a program--to make sure it remains free software for all its users. We, the Free Software Foundation, use
-the GNU General Public License for most of our software; it applies also to any other work released this way by its
-authors. You can apply it to your programs, too.
-
-When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make
-sure that you have the freedom to distribute copies of free software (and charge for them if you wish), that you receive
-source code or can get it if you want it, that you can change the software or use pieces of it in new free programs, and
-that you know you can do these things.
-
-To protect your rights, we need to prevent others from denying you these rights or asking you to surrender the rights.
-Therefore, you have certain responsibilities if you distribute copies of the software, or if you modify it:
-responsibilities to respect the freedom of others.
-
-For example, if you distribute copies of such a program, whether gratis or for a fee, you must pass on to the recipients
-the same freedoms that you received. You must make sure that they, too, receive or can get the source code. And you must
-show them these terms so they know their rights.
-
-Developers that use the GNU GPL protect your rights with two steps:
-
-1. assert copyright on the software, and
-2. offer you this License giving you legal permission to copy, distribute and/or modify it.
-
-For the developers' and authors' protection, the GPL clearly explains that there is no warranty for this free software.
-For both users' and authors' sake, the GPL requires that modified versions be marked as changed, so that their problems
-will not be attributed erroneously to authors of previous versions.
-
-Some devices are designed to deny users access to install or run modified versions of the software inside them, although
-the manufacturer can do so. This is fundamentally incompatible with the aim of protecting users' freedom to change the
-software. The systematic pattern of such abuse occurs in the area of products for individuals to use, which is precisely
-where it is most unacceptable. Therefore, we have designed this version of the GPL to prohibit the practice for those
-products. If such problems arise substantially in other domains, we stand ready to extend this provision to those
-domains in future versions of the GPL, as needed to protect the freedom of users.
-
-Finally, every program is threatened constantly by software patents. States should not allow patents to restrict
-development and use of software on general-purpose computers, but in those that do, we wish to avoid the special danger
-that patents applied to a free program could make it effectively proprietary. To prevent this, the GPL assures that
-patents cannot be used to render the program non-free.
-
-The precise terms and conditions for copying, distribution and modification follow.
-
-## TERMS AND CONDITIONS
-
-### 0. Definitions.
-
-*This License* refers to version 3 of the GNU General Public License.
-
-*Copyright* also means copyright-like laws that apply to other kinds of works, such as semiconductor masks.
-
-*The Program* refers to any copyrightable work licensed under this License. Each licensee is addressed as *you*.
-*Licensees* and *recipients* may be individuals or organizations.
-
-To *modify* a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission,
-other than the making of an exact copy. The resulting work is called a *modified version* of the earlier work or a work
-*based on* the earlier work.
-
-A *covered work* means either the unmodified Program or a work based on the Program.
-
-To *propagate* a work means to do anything with it that, without permission, would make you directly or secondarily
-liable for infringement under applicable copyright law, except executing it on a computer or modifying a private copy.
-Propagation includes copying, distribution (with or without modification), making available to the public, and in some
-countries other activities as well.
-
-To *convey* a work means any kind of propagation that enables other parties to make or receive copies. Mere interaction
-with a user through a computer network, with no transfer of a copy, is not conveying.
-
-An interactive user interface displays *Appropriate Legal Notices* to the extent that it includes a convenient and
-prominently visible feature that
-
-1. displays an appropriate copyright notice, and
-2. tells the user that there is no warranty for the work (except to the extent that warranties are provided), that
- licensees may convey the work under this License, and how to view a copy of this License.
-
-If the interface presents a list of user commands or options, such as a menu, a prominent item in the list meets this
-criterion.
-
-### 1. Source Code.
-
-The *source code* for a work means the preferred form of the work for making modifications to it. *Object code* means
-any non-source form of a work.
-
-A *Standard Interface* means an interface that either is an official standard defined by a recognized standards body,
-or, in the case of interfaces specified for a particular programming language, one that is widely used among developers
-working in that language.
-
-The *System Libraries* of an executable work include anything, other than the work as a whole, that (a) is included in
-the normal form of packaging a Major Component, but which is not part of that Major Component, and (b) serves only to
-enable use of the work with that Major Component, or to implement a Standard Interface for which an implementation is
-available to the public in source code form. A *Major Component*, in this context, means a major essential component
-(kernel, window system, and so on) of the specific operating system (if any) on which the executable work runs, or a
-compiler used to produce the work, or an object code interpreter used to run it.
-
-The *Corresponding Source* for a work in object code form means all the source code needed to generate, install, and (
-for an executable work) run the object code and to modify the work, including scripts to control those activities.
-However, it does not include the work's System Libraries, or general-purpose tools or generally available free programs
-which are used unmodified in performing those activities but which are not part of the work. For example, Corresponding
-Source includes interface definition files associated with source files for the work, and the source code for shared
-libraries and dynamically linked subprograms that the work is specifically designed to require, such as by intimate data
-communication or control flow between those subprograms and other parts of the work.
-
-The Corresponding Source need not include anything that users can regenerate automatically from other parts of the
-Corresponding Source.
-
-The Corresponding Source for a work in source code form is that same work.
-
-### 2. Basic Permissions.
-
-All rights granted under this License are granted for the term of copyright on the Program, and are irrevocable provided
-the stated conditions are met. This License explicitly affirms your unlimited permission to run the unmodified Program.
-The output from running a covered work is covered by this License only if the output, given its content, constitutes a
-covered work. This License acknowledges your rights of fair use or other equivalent, as provided by copyright law.
-
-You may make, run and propagate covered works that you do not convey, without conditions so long as your license
-otherwise remains in force. You may convey covered works to others for the sole purpose of having them make
-modifications exclusively for you, or provide you with facilities for running those works, provided that you comply with
-the terms of this License in conveying all material for which you do not control copyright. Those thus making or running
-the covered works for you must do so exclusively on your behalf, under your direction and control, on terms that
-prohibit them from making any copies of your copyrighted material outside their relationship with you.
-
-Conveying under any other circumstances is permitted solely under the conditions stated below. Sublicensing is not
-allowed; section 10 makes it unnecessary.
-
-### 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
-
-No covered work shall be deemed part of an effective technological measure under any applicable law fulfilling
-obligations under article 11 of the WIPO copyright treaty adopted on 20 December 1996, or similar laws prohibiting or
-restricting circumvention of such measures.
-
-When you convey a covered work, you waive any legal power to forbid circumvention of technological measures to the
-extent such circumvention is effected by exercising rights under this License with respect to the covered work, and you
-disclaim any intention to limit operation or modification of the work as a means of enforcing, against the work's users,
-your or third parties' legal rights to forbid circumvention of technological measures.
-
-### 4. Conveying Verbatim Copies.
-
-You may convey verbatim copies of the Program's source code as you receive it, in any medium, provided that you
-conspicuously and appropriately publish on each copy an appropriate copyright notice; keep intact all notices stating
-that this License and any non-permissive terms added in accord with section 7 apply to the code; keep intact all notices
-of the absence of any warranty; and give all recipients a copy of this License along with the Program.
-
-You may charge any price or no price for each copy that you convey, and you may offer support or warranty protection for
-a fee.
-
-### 5. Conveying Modified Source Versions.
-
-You may convey a work based on the Program, or the modifications to produce it from the Program, in the form of source
-code under the terms of section 4, provided that you also meet all of these conditions:
-
-- a) The work must carry prominent notices stating that you modified it, and giving a relevant date.
-- b) The work must carry prominent notices stating that it is released under this License and any conditions added under
- section 7. This requirement modifies the requirement in section 4 to *keep intact all notices*.
-- c) You must license the entire work, as a whole, under this License to anyone who comes into possession of a copy.
- This License will therefore apply, along with any applicable section 7 additional terms, to the whole of the work, and
- all its parts, regardless of how they are packaged. This License gives no permission to license the work in any other
- way, but it does not invalidate such permission if you have separately received it.
-- d) If the work has interactive user interfaces, each must display Appropriate Legal Notices; however, if the Program
- has interactive interfaces that do not display Appropriate Legal Notices, your work need not make them do so.
-
-A compilation of a covered work with other separate and independent works, which are not by their nature extensions of
-the covered work, and which are not combined with it such as to form a larger program, in or on a volume of a storage or
-distribution medium, is called an *aggregate* if the compilation and its resulting copyright are not used to limit the
-access or legal rights of the compilation's users beyond what the individual works permit. Inclusion of a covered work
-in an aggregate does not cause this License to apply to the other parts of the aggregate.
-
-### 6. Conveying Non-Source Forms.
-
-You may convey a covered work in object code form under the terms of sections 4 and 5, provided that you also convey the
-machine-readable Corresponding Source under the terms of this License, in one of these ways:
-
-- a) Convey the object code in, or embodied in, a physical product (including a physical distribution medium),
- accompanied by the Corresponding Source fixed on a durable physical medium customarily used for software interchange.
-- b) Convey the object code in, or embodied in, a physical product (including a physical distribution medium),
- accompanied by a written offer, valid for at least three years and valid for as long as you offer spare parts or
- customer support for that product model, to give anyone who possesses the object code either
-
-1. a copy of the Corresponding Source for all the software in the product that is covered by this License, on a durable
- physical medium customarily used for software interchange, for a price no more than your reasonable cost of
- physically performing this conveying of source, or
-2. access to copy the Corresponding Source from a network server at no charge.
-
-- c) Convey individual copies of the object code with a copy of the written offer to provide the Corresponding Source.
- This alternative is allowed only occasionally and noncommercially, and only if you received the object code with such
- an offer, in accord with subsection 6b.
-- d) Convey the object code by offering access from a designated place
- (gratis or for a charge), and offer equivalent access to the Corresponding Source in the same way through the same
- place at no further charge. You need not require recipients to copy the Corresponding Source along with the object
- code. If the place to copy the object code is a network server, the Corresponding Source may be on a different server
- (operated by you or a third party) that supports equivalent copying facilities, provided you maintain clear directions
- next to the object code saying where to find the Corresponding Source. Regardless of what server hosts the
- Corresponding Source, you remain obligated to ensure that it is available for as long as needed to satisfy these
- requirements.
-- e) Convey the object code using peer-to-peer transmission, provided you inform other peers where the object code and
- Corresponding Source of the work are being offered to the general public at no charge under subsection 6d.
-
-A separable portion of the object code, whose source code is excluded from the Corresponding Source as a System Library,
-need not be included in conveying the object code work.
-
-A *User Product* is either
-
-1. a *consumer product*, which means any tangible personal property which is normally used for personal, family, or
- household purposes, or
-2. anything designed or sold for incorporation into a dwelling.
-
-In determining whether a product is a consumer product, doubtful cases shall be resolved in favor of coverage. For a
-particular product received by a particular user, *normally used* refers to a typical or common use of that class of
-product, regardless of the status of the particular user or of the way in which the particular user actually uses, or
-expects or is expected to use, the product. A product is a consumer product regardless of whether the product has
-substantial commercial, industrial or non-consumer uses, unless such uses represent the only significant mode of use of
-the product.
-
-*Installation Information* for a User Product means any methods, procedures, authorization keys, or other information
-required to install and execute modified versions of a covered work in that User Product from a modified version of its
-Corresponding Source. The information must suffice to ensure that the continued functioning of the modified object code
-is in no case prevented or interfered with solely because modification has been made.
-
-If you convey an object code work under this section in, or with, or specifically for use in, a User Product, and the
-conveying occurs as part of a transaction in which the right of possession and use of the User Product is transferred to
-the recipient in perpetuity or for a fixed term (regardless of how the transaction is characterized), the Corresponding
-Source conveyed under this section must be accompanied by the Installation Information. But this requirement does not
-apply if neither you nor any third party retains the ability to install modified object code on the User Product (for
-example, the work has been installed in ROM).
-
-The requirement to provide Installation Information does not include a requirement to continue to provide support
-service, warranty, or updates for a work that has been modified or installed by the recipient, or for the User Product
-in which it has been modified or installed. Access to a network may be denied when the modification itself materially
-and adversely affects the operation of the network or violates the rules and protocols for communication across the
-network.
-
-Corresponding Source conveyed, and Installation Information provided, in accord with this section must be in a format
-that is publicly documented (and with an implementation available to the public in source code form), and must require
-no special password or key for unpacking, reading or copying.
-
-### 7. Additional Terms.
-
-*Additional permissions* are terms that supplement the terms of this License by making exceptions from one or more of
-its conditions. Additional permissions that are applicable to the entire Program shall be treated as though they were
-included in this License, to the extent that they are valid under applicable law. If additional permissions apply only
-to part of the Program, that part may be used separately under those permissions, but the entire Program remains
-governed by this License without regard to the additional permissions.
-
-When you convey a copy of a covered work, you may at your option remove any additional permissions from that copy, or
-from any part of it. (Additional permissions may be written to require their own removal in certain cases when you
-modify the work.) You may place additional permissions on material, added by you to a covered work, for which you have
-or can give appropriate copyright permission.
-
-Notwithstanding any other provision of this License, for material you add to a covered work, you may (if authorized by
-the copyright holders of that material)
-supplement the terms of this License with terms:
-
-- a) Disclaiming warranty or limiting liability differently from the terms of sections 15 and 16 of this License; or
-- b) Requiring preservation of specified reasonable legal notices or author attributions in that material or in the
- Appropriate Legal Notices displayed by works containing it; or
-- c) Prohibiting misrepresentation of the origin of that material, or requiring that modified versions of such material
- be marked in reasonable ways as different from the original version; or
-- d) Limiting the use for publicity purposes of names of licensors or authors of the material; or
-- e) Declining to grant rights under trademark law for use of some trade names, trademarks, or service marks; or
-- f) Requiring indemnification of licensors and authors of that material by anyone who conveys the material (or modified
- versions of it) with contractual assumptions of liability to the recipient, for any liability that these contractual
- assumptions directly impose on those licensors and authors.
-
-All other non-permissive additional terms are considered *further restrictions*
-within the meaning of section 10. If the Program as you received it, or any part of it, contains a notice stating that
-it is governed by this License along with a term that is a further restriction, you may remove that term. If a license
-document contains a further restriction but permits relicensing or conveying under this License, you may add to a
-covered work material governed by the terms of that license document, provided that the further restriction does not
-survive such relicensing or conveying.
-
-If you add terms to a covered work in accord with this section, you must place, in the relevant source files, a
-statement of the additional terms that apply to those files, or a notice indicating where to find the applicable terms.
-
-Additional terms, permissive or non-permissive, may be stated in the form of a separately written license, or stated as
-exceptions; the above requirements apply either way.
-
-### 8. Termination.
-
-You may not propagate or modify a covered work except as expressly provided under this License. Any attempt otherwise to
-propagate or modify it is void, and will automatically terminate your rights under this License (including any patent
-licenses granted under the third paragraph of section 11).
-
-However, if you cease all violation of this License, then your license from a particular copyright holder is reinstated
-
-- a) provisionally, unless and until the copyright holder explicitly and finally terminates your license, and
-- b) permanently, if the copyright holder fails to notify you of the violation by some reasonable means prior to 60 days
- after the cessation.
-
-Moreover, your license from a particular copyright holder is reinstated permanently if the copyright holder notifies you
-of the violation by some reasonable means, this is the first time you have received notice of violation of this
-License (for any work) from that copyright holder, and you cure the violation prior to 30 days after your receipt of the
-notice.
-
-Termination of your rights under this section does not terminate the licenses of parties who have received copies or
-rights from you under this License. If your rights have been terminated and not permanently reinstated, you do not
-qualify to receive new licenses for the same material under section 10.
-
-### 9. Acceptance Not Required for Having Copies.
-
-You are not required to accept this License in order to receive or run a copy of the Program. Ancillary propagation of a
-covered work occurring solely as a consequence of using peer-to-peer transmission to receive a copy likewise does not
-require acceptance. However, nothing other than this License grants you permission to propagate or modify any covered
-work. These actions infringe copyright if you do not accept this License. Therefore, by modifying or propagating a
-covered work, you indicate your acceptance of this License to do so.
-
-### 10. Automatic Licensing of Downstream Recipients.
-
-Each time you convey a covered work, the recipient automatically receives a license from the original licensors, to run,
-modify and propagate that work, subject to this License. You are not responsible for enforcing compliance by third
-parties with this License.
-
-An *entity transaction* is a transaction transferring control of an organization, or substantially all assets of one, or
-subdividing an organization, or merging organizations. If propagation of a covered work results from an entity
-transaction, each party to that transaction who receives a copy of the work also receives whatever licenses to the work
-the party's predecessor in interest had or could give under the previous paragraph, plus a right to possession of the
-Corresponding Source of the work from the predecessor in interest, if the predecessor has it or can get it with
-reasonable efforts.
-
-You may not impose any further restrictions on the exercise of the rights granted or affirmed under this License. For
-example, you may not impose a license fee, royalty, or other charge for exercise of rights granted under this License,
-and you may not initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging that any patent
-claim is infringed by making, using, selling, offering for sale, or importing the Program or any portion of it.
-
-### 11. Patents.
-
-A *contributor* is a copyright holder who authorizes use under this License of the Program or a work on which the
-Program is based. The work thus licensed is called the contributor's *contributor version*.
-
-A contributor's *essential patent claims* are all patent claims owned or controlled by the contributor, whether already
-acquired or hereafter acquired, that would be infringed by some manner, permitted by this License, of making, using, or
-selling its contributor version, but do not include claims that would be infringed only as a consequence of further
-modification of the contributor version. For purposes of this definition, *control* includes the right to grant patent
-sublicenses in a manner consistent with the requirements of this License.
-
-Each contributor grants you a non-exclusive, worldwide, royalty-free patent license under the contributor's essential
-patent claims, to make, use, sell, offer for sale, import and otherwise run, modify and propagate the contents of its
-contributor version.
-
-In the following three paragraphs, a *patent license* is any express agreement or commitment, however denominated, not
-to enforce a patent (such as an express permission to practice a patent or covenant not to sue for patent infringement).
-To *grant* such a patent license to a party means to make such an agreement or commitment not to enforce a patent
-against the party.
-
-If you convey a covered work, knowingly relying on a patent license, and the Corresponding Source of the work is not
-available for anyone to copy, free of charge and under the terms of this License, through a publicly available network
-server or other readily accessible means, then you must either
-
-1. cause the Corresponding Source to be so available, or
-2. arrange to deprive yourself of the benefit of the patent license for this particular work, or
-3. arrange, in a manner consistent with the requirements of this License, to extend the patent license to downstream
- recipients.
-
-*Knowingly relying* means you have actual knowledge that, but for the patent license, your conveying the covered work in
-a country, or your recipient's use of the covered work in a country, would infringe one or more identifiable patents in
-that country that you have reason to believe are valid.
-
-If, pursuant to or in connection with a single transaction or arrangement, you convey, or propagate by procuring
-conveyance of, a covered work, and grant a patent license to some of the parties receiving the covered work authorizing
-them to use, propagate, modify or convey a specific copy of the covered work, then the patent license you grant is
-automatically extended to all recipients of the covered work and works based on it.
-
-A patent license is *discriminatory* if it does not include within the scope of its coverage, prohibits the exercise of,
-or is conditioned on the non-exercise of one or more of the rights that are specifically granted under this License. You
-may not convey a covered work if you are a party to an arrangement with a third party that is in the business of
-distributing software, under which you make payment to the third party based on the extent of your activity of conveying
-the work, and under which the third party grants, to any of the parties who would receive the covered work from you, a
-discriminatory patent license
-
-- a) in connection with copies of the covered work conveyed by you (or copies made from those copies), or
-- b) primarily for and in connection with specific products or compilations that contain the covered work, unless you
- entered into that arrangement, or that patent license was granted, prior to 28 March 2007.
-
-Nothing in this License shall be construed as excluding or limiting any implied license or other defenses to
-infringement that may otherwise be available to you under applicable patent law.
-
-### 12. No Surrender of Others' Freedom.
-
-If conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this
-License, they do not excuse you from the conditions of this License. If you cannot convey a covered work so as to
-satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence
-you may not convey it at all. For example, if you agree to terms that obligate you to collect a royalty for further
-conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License
-would be to refrain entirely from conveying the Program.
-
-### 13. Use with the GNU Affero General Public License.
-
-Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work
-licensed under version 3 of the GNU Affero General Public License into a single combined work, and to convey the
-resulting work. The terms of this License will continue to apply to the part which is the covered work, but the special
-requirements of the GNU Affero General Public License, section 13, concerning interaction through a network will apply
-to the combination as such.
-
-### 14. Revised Versions of this License.
-
-The Free Software Foundation may publish revised and/or new versions of the GNU General Public License from time to
-time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new
-problems or concerns.
-
-Each version is given a distinguishing version number. If the Program specifies that a certain numbered version of the
-GNU General Public License *or any later version* applies to it, you have the option of following the terms and
-conditions either of that numbered version or of any later version published by the Free Software Foundation. If the
-Program does not specify a version number of the GNU General Public License, you may choose any version ever published
-by the Free Software Foundation.
-
-If the Program specifies that a proxy can decide which future versions of the GNU General Public License can be used,
-that proxy's public statement of acceptance of a version permanently authorizes you to choose that version for the
-Program.
-
-Later license versions may give you additional or different permissions. However, no additional obligations are imposed
-on any author or copyright holder as a result of your choosing to follow a later version.
-
-### 15. Disclaimer of Warranty.
-
-THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING
-THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM *AS IS* WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR
-IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
-THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU
-ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
-
-### 16. Limitation of Liability.
-
-IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO
-MODIFIES AND/OR CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL,
-INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO
-LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM
-TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
-DAMAGES.
-
-### 17. Interpretation of Sections 15 and 16.
-
-If the disclaimer of warranty and limitation of liability provided above cannot be given local legal effect according to
-their terms, reviewing courts shall apply local law that most closely approximates an absolute waiver of all civil
-liability in connection with the Program, unless a warranty or assumption of liability accompanies a copy of the Program
-in return for a fee.
-
-## END OF TERMS AND CONDITIONS
-
-### How to Apply These Terms to Your New Programs
-
-If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve
-this is to make it free software which everyone can redistribute and change under these terms.
-
-To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to
-most effectively state the exclusion of warranty; and each file should have at least the *copyright* line and a pointer
-to where the full notice is found.
-
-
- Copyright (C)
-
- This program is free software: you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation, either version 3 of the License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License
- along with this program. If not, see <http://www.gnu.org/licenses/>.
-
-Also add information on how to contact you by electronic and paper mail.
-
-If the program does terminal interaction, make it output a short notice like this when it starts in an interactive mode:
-
- Copyright (C)
- This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
- This is free software, and you are welcome to redistribute it
- under certain conditions; type `show c' for details.
-
-The hypothetical commands `show w` and `show c` should show the appropriate parts of the General Public License. Of
-course, your program's commands might be different; for a GUI interface, you would use an *about box*.
-
-You should also get your employer (if you work as a programmer) or school, if any, to sign a *copyright disclaimer* for
-the program, if necessary. For more information on this, and how to apply and follow the GNU GPL, see
-[http://www.gnu.org/licenses/](http://www.gnu.org/licenses/).
-
-The GNU General Public License does not permit incorporating your program into proprietary programs. If your program is
-a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If
-this is what you want to do, use the GNU Lesser General Public License instead of this License. But first, please read
-[http://www.gnu.org/philosophy/why-not-lgpl.html](http://www.gnu.org/philosophy/why-not-lgpl.html).
diff --git a/lc-minecraft-link/build.gradle b/lc-minecraft-link/build.gradle
deleted file mode 100644
index db32c8463c96f1cfebe934dc5e4056dbeceecf6b..0000000000000000000000000000000000000000
--- a/lc-minecraft-link/build.gradle
+++ /dev/null
@@ -1,57 +0,0 @@
-plugins {
- id 'java'
- id 'java-library'
- id 'application'
-}
-
-// Version also located in plugin.yml!
-group 'leigh'
-version '2.5'
-
-sourceCompatibility = JavaVersion.VERSION_17
-targetCompatibility = JavaVersion.VERSION_17
-
-repositories {
- mavenCentral()
- mavenLocal()
- maven { url 'https://jitpack.io' }
- // Bukkit
- maven {
- url = 'https://hub.spigotmc.org/nexus/content/repositories/snapshots/'
- content {
- includeGroup 'org.bukkit'
- includeGroup 'org.spigotmc'
- }
- }
- maven { url 'https://repo.essentialsx.net/releases/' }
- // WorldEdit
- maven { url "https://maven.enginehub.org/repo/" }
- // Dynmap
- maven { url "https://repo.mikeprimm.com/" }
- // QuickShop
- maven { url "https://repo.codemc.io/repository/maven-public/" }
- // Quests
- maven { url 'https://jitpack.io' }
-}
-
-dependencies {
- testImplementation 'org.junit.jupiter:junit-jupiter-api:5.7.0'
- testRuntimeOnly 'org.junit.jupiter:junit-jupiter-engine:5.7.0'
- compileOnly 'org.bukkit:bukkit:1.15.2-R0.1-SNAPSHOT'
- compileOnly 'us.dynmap:dynmap-api:3.0-SNAPSHOT'
- compileOnly 'com.sk89q.worldedit:worldedit-bukkit:7.2.9'
- implementation group: 'c3p0', name: 'c3p0', version: '0.9.1.2'
- implementation project(':lc-esp-sdk')
- implementation project(':lc-minecraft-link-sdk')
-}
-
-test {
- useJUnitPlatform()
-}
-
-jar {
- duplicatesStrategy = DuplicatesStrategy.INCLUDE
- from {
- configurations.runtimeClasspath.collect { it.isDirectory() ? it : zipTree(it) }
- }
-}
diff --git a/lc-minecraft-link/src/main/java/lc/minecraft/link/BukkitEventHandler.java b/lc-minecraft-link/src/main/java/lc/minecraft/link/BukkitEventHandler.java
deleted file mode 100644
index d99c90424b4e4d189f0325330faa23fff789d4c8..0000000000000000000000000000000000000000
--- a/lc-minecraft-link/src/main/java/lc/minecraft/link/BukkitEventHandler.java
+++ /dev/null
@@ -1,178 +0,0 @@
-package lc.minecraft.link;
-
-import lc.eo.EO;
-import lc.esp.sdk.*;
-import lc.esp.sdk.telemetry.TelemetryFrame;
-import lc.esp.sdk.telemetry.TelemetrySymbol;
-import lc.mecha.log.MechaLogger;
-import lc.mecha.log.MechaLoggerFactory;
-import lc.minecraft.link.sdk.LinkAddresses;
-import lc.minecraft.link.sdk.schema.v2.Entity_death_eventElementDAO;
-import org.bukkit.Location;
-import org.bukkit.entity.Player;
-import org.bukkit.event.EventHandler;
-import org.bukkit.event.Listener;
-import org.bukkit.event.block.BlockBreakEvent;
-import org.bukkit.event.block.BlockPlaceEvent;
-import org.bukkit.event.entity.EntityDeathEvent;
-import org.bukkit.event.player.PlayerMoveEvent;
-
-import java.time.Instant;
-
-/**
- * This class implements a bukkit {@link Listener} which is primarily responsible for receiving events from the server
- * and broadcasting a telemetry signal in realtime to ESP.
- *
- * @author Alex Leigh
- * @since mk2
- */
-public class BukkitEventHandler implements Listener {
- private final String serverName;
- public static final String KEY_LOC_TO_X = "lc.mc.loc.dst.x";
- public static final String KEY_LOC_TO_Y = "lc.mc.loc.dst.y";
- public static final String KEY_LOC_TO_Z = "lc.mc.loc.dst.z";
- public static final String KEY_LOC_TO_YAW = "lc.mc.loc.dst.yaw";
- public static final String KEY_LOC_TO_PITCH = "lc.mc.loc.dst.pitch";
- public static final String KEY_LOC_FROM_X = "lc.mc.loc.src.x";
- public static final String KEY_LOC_FROM_Y = "lc.mc.loc.src.y";
- public static final String KEY_LOC_FROM_Z = "lc.mc.loc.src.z";
- public static final String KEY_LOC_FROM_YAW = "lc.mc.loc.src.yaw";
- public static final String KEY_LOC_FROM_PITCH = "lc.mc.loc.src.pitch";
- public static final String KEY_EVENT_TYPE = "lc.mc.evt.typ";
- public static final String KEY_BLOCK_TYPE = "lc.mc.blk.typ";
- public static final String KEY_SERVER = "lc.mc.server";
- public static final String KEY_PLAYER_NAME = "lc.mc.player_name";
- public static final String KEY_WORLD = "lc.mc.world";
- public static final String EVENT_BLOCK_BREAK = "lc.mc.evt.blk_break";
- public static final String EVENT_BLOCK_PLACE = "lc.mc.evt.blk_place";
- public static final String EVENT_PLAYER_MOVE = "lc.mc.evt.blk_brk.player_mv";
- private static final MechaLogger logger = MechaLoggerFactory.getLogger(BukkitEventHandler.class);
- private final ESPClient esp;
- private final ESPProducer producer;
- private final AsyncSender sender;
-
- public BukkitEventHandler(String serverName) throws Exception {
- this.serverName = serverName;
- esp = new ESPClient();
- esp.start();
- ESPSession session = esp.createSession();
- producer = session.createProducer(LinkAddresses.getTelemetryAddress());
- sender = new AsyncSender(producer);
- new Thread(sender).start();
- }
-
- private void addDstLocation(TelemetryFrame values, Location loc) {
- addValue(values, KEY_LOC_TO_X, Double.toString(loc.getX()));
- addValue(values, KEY_LOC_TO_Y, Double.toString(loc.getY()));
- addValue(values, KEY_LOC_TO_Z, Double.toString(loc.getZ()));
- }
-
- private void addSrcLocation(TelemetryFrame values, Location loc) {
- addValue(values, KEY_LOC_FROM_X, Double.toString(loc.getX()));
- addValue(values, KEY_LOC_FROM_Y, Double.toString(loc.getY()));
- addValue(values, KEY_LOC_FROM_Z, Double.toString(loc.getZ()));
- }
-
- private void addDstAngle(TelemetryFrame values, Location loc) {
- // FIXME: Should set UOM to degrees!
- addValue(values, KEY_LOC_TO_YAW, Double.toString(loc.getYaw()));
- addValue(values, KEY_LOC_TO_PITCH, Double.toString(loc.getPitch()));
- }
-
- private void addSrcAngle(TelemetryFrame values, Location loc) {
- // FIXME: Should set UOM to degrees!
- addValue(values, KEY_LOC_FROM_YAW, Double.toString(loc.getYaw()));
- addValue(values, KEY_LOC_FROM_PITCH, Double.toString(loc.getPitch()));
- }
-
- private void addValue(TelemetryFrame values, String key, String value) {
- values.addSymbol(new TelemetrySymbol(key, value, true));
- }
-
- @EventHandler(ignoreCancelled = true)
- public void onBlockBreak(BlockBreakEvent event) {
- try {
- TelemetryFrame frame = new TelemetryFrame(Instant.now());
- addDstLocation(frame, event.getBlock().getLocation());
- addSrcLocation(frame, event.getPlayer().getLocation());
-
- ESPMessage msg = new ESPMessage();
- msg.setParameter(KEY_SERVER, serverName);
- msg.setParameter(KEY_PLAYER_NAME, event.getPlayer().getName());
- msg.setParameter(KEY_WORLD, event.getBlock().getWorld().getName());
- msg.setParameter(KEY_EVENT_TYPE, EVENT_BLOCK_BREAK);
- msg.setPayload(frame.toJson());
- logger.trace("Sending message: {}", msg);
- sender.send(msg);
- } catch (Exception e) {
- e.printStackTrace();
- }
- }
-
- @EventHandler(ignoreCancelled = true)
- public void onBlockPlaced(BlockPlaceEvent event) {
- try {
- TelemetryFrame frame = new TelemetryFrame(Instant.now());
- addSrcLocation(frame, event.getPlayer().getLocation());
- addDstLocation(frame, event.getBlock().getLocation());
- addValue(frame, KEY_BLOCK_TYPE, event.getBlock().getType().toString());
-
- ESPMessage msg = new ESPMessage();
- msg.setParameter(KEY_SERVER, serverName);
- msg.setParameter(KEY_PLAYER_NAME, event.getPlayer().getName());
- msg.setParameter(KEY_WORLD, event.getBlock().getWorld().getName());
- msg.setParameter(KEY_EVENT_TYPE, EVENT_BLOCK_PLACE);
- msg.setPayload(frame.toJson());
- logger.trace("Sending message: {}", msg);
- sender.send(msg);
- } catch (Exception e) {
- e.printStackTrace();
- }
- }
-
- @EventHandler(ignoreCancelled = true)
- public void onPlayerMove(PlayerMoveEvent event) {
- try {
- TelemetryFrame frame = new TelemetryFrame(Instant.now());
- Location loc = event.getTo();
- addSrcLocation(frame, event.getFrom());
- addSrcAngle(frame, event.getFrom());
- addDstLocation(frame, event.getTo());
- addDstAngle(frame, event.getTo());
-
- ESPMessage msg = new ESPMessage();
- msg.setParameter(KEY_SERVER, serverName);
- msg.setParameter(KEY_PLAYER_NAME, event.getPlayer().getName());
- msg.setParameter(KEY_WORLD, loc.getWorld().getName());
- msg.setParameter(KEY_EVENT_TYPE, EVENT_PLAYER_MOVE);
- msg.setPayload(frame.toJson());
- sender.send(msg);
- } catch (Exception e) {
- e.printStackTrace();
- }
- }
-
- @EventHandler
- public void onEntityDeath(EntityDeathEvent event) {
- try {
- EO msg = Entity_death_eventElementDAO.create();
- Entity_death_eventElementDAO.setEntity_name(msg, event.getEntity().getName());
- Entity_death_eventElementDAO.setServer_name(msg, serverName);
- Entity_death_eventElementDAO.setWorld_name(msg, event.getEntity().getWorld().getName());
- Player killer = event.getEntity().getKiller();
- if (killer != null) {
- Entity_death_eventElementDAO.setKiller_name(msg, killer.getName());
- Entity_death_eventElementDAO.setKiller_uuid(msg, killer.getUniqueId());
- }
- ESPMessage espMsg = new ESPMessage();
- espMsg.setPayload(msg);
- logger.info("Sending msg: {}", espMsg);
- sender.send(espMsg);
- } catch (Exception e) {
- e.printStackTrace();
- }
- }
-}
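
The deleted `BukkitEventHandler` above applies one pattern to every Bukkit event: build a `TelemetryFrame` of keyed `TelemetrySymbol`s, wrap it in an `ESPMessage` whose parameters identify the server, player, world, and event type, and hand the message to the `AsyncSender`. The sketch below shows how a hypothetical additional handler would follow that same pattern; it reuses only the SDK calls visible in the file above, and the `PlayerBedEnterEvent` hook and the `"lc.mc.evt.bed_enter"` event-type value are illustrative assumptions, not part of the original plugin.

```java
// Sketch only: this method would live inside BukkitEventHandler above and relies on its
// fields (serverName, sender) and helpers (addSrcLocation, addDstLocation) plus its imports.
@EventHandler(ignoreCancelled = true)
public void onBedEnter(org.bukkit.event.player.PlayerBedEnterEvent event) {
    try {
        TelemetryFrame frame = new TelemetryFrame(Instant.now());
        addSrcLocation(frame, event.getPlayer().getLocation()); // where the player stands
        addDstLocation(frame, event.getBed().getLocation());    // where the bed is

        ESPMessage msg = new ESPMessage();
        msg.setParameter(KEY_SERVER, serverName);
        msg.setParameter(KEY_PLAYER_NAME, event.getPlayer().getName());
        msg.setParameter(KEY_WORLD, event.getBed().getWorld().getName());
        msg.setParameter(KEY_EVENT_TYPE, "lc.mc.evt.bed_enter"); // illustrative key, not defined by the SDK
        msg.setPayload(frame.toJson());
        sender.send(msg);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
```
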
diff --git a/lc-minecraft-link/src/main/java/lc/minecraft/link/LinkPlugin.java b/lc-minecraft-link/src/main/java/lc/minecraft/link/LinkPlugin.java
deleted file mode 100644
index 8e05e9b6a1dc5333501efd496983ac914eeb0a74..0000000000000000000000000000000000000000
--- a/lc-minecraft-link/src/main/java/lc/minecraft/link/LinkPlugin.java
+++ /dev/null
@@ -1,102 +0,0 @@
-package lc.minecraft.link;
-
-import lc.mecha.cred.Credential;
-import lc.mecha.cred.CredentialConnection;
-import lc.mecha.log.MechaLogger;
-import lc.mecha.log.MechaLoggerFactory;
-import lc.mecha.util.UniversalJob;
-import lc.minecraft.link.bukkit.BukkitReputationEventHandler;
-import lc.minecraft.link.faction.Faction;
-import lc.minecraft.link.faction.Reputation;
-import org.apache.commons.lang.StringUtils;
-import org.bukkit.Bukkit;
-import org.bukkit.configuration.file.FileConfiguration;
-import org.bukkit.entity.Player;
-import org.bukkit.event.Listener;
-import org.bukkit.plugin.java.JavaPlugin;
-import org.bukkit.scheduler.BukkitRunnable;
-
-import javax.jms.JMSException;
-import java.io.IOException;
-import java.util.UUID;
-
-/**
- * This class implements a Bukkit plugin providing a RESTful API service compatible with the
- * LEIGH&CO Enhanced Services Platform (ESP). The service may be used directly or may be placed
- * onto an ESP grid behind a Kabe server.
- *
- * The purpose of the service is to provide the means to author Minecraft extensions / plugins that
- * run in a distributed fashion independently of the Bukkit server instance. This allows plugins
- * to be added, removed, and restarted dynamically without interfering with or restarting the Bukkit
- * server, insulates the external services from server version changes, and opens the opportunity
- * for the external services to be written in any programming language.
- *
- * @author Alex Leigh
- */
-public class LinkPlugin extends JavaPlugin implements Listener {
- private static final MechaLogger logger = MechaLoggerFactory.getLogger(LinkPlugin.class);
- public static final String CFG_KEY_SERVERNAME = "serverName";
- private final Reputation reputation;
-
-
- public LinkPlugin() throws IOException, JMSException {
- UniversalJob.banner(logger, "Link mk2");
-
- FileConfiguration cfg = this.getConfig();
- String reputationUrl = cfg.getString("reputationUrl");
- String reputationUser = cfg.getString("reputationUser");
- String reputationPassword = cfg.getString("reputationPassword");
-
- reputation = new Reputation(new CredentialConnection(reputationUrl,
- new Credential(reputationUser, reputationPassword)), new BukkitReputationEventHandler(),
- new Faction());
- }
-
- public void onEnable() {
- getLogger().info("MinecraftLink mk1 Enabled");
- saveDefaultConfig();
- FileConfiguration config = getConfig();
- String serverName = config.getString(CFG_KEY_SERVERNAME);
-
- if (StringUtils.isEmpty(serverName)) throw new IllegalStateException("serverName must be provided.");
-
- try {
- getServer().getPluginManager().registerEvents(new BukkitEventHandler(serverName), this);
- } catch (Exception e) {
- e.printStackTrace();
- }
- }
-
- public void onDisable() {
- getLogger().info("MinecraftLink Disabled");
- }
-
- /**
- * Send a message to the given player name on the Bukkit server, if they are logged in.
- */
- public void sendMessage(String playerName, String msg) {
- new BukkitRunnable() {
- @Override
- public void run() {
- Player p = Bukkit.getPlayer(playerName);
- logger.info("Found player: {}", p);
- if (p != null) p.sendMessage(msg);
- }
- }.runTask(this); // schedule on the primary server thread rather than running inline
- }
-
- public void sendMessage(UUID playerId, String msg) {
- new BukkitRunnable() {
- @Override
- public void run() {
- Player p = Bukkit.getPlayer(playerId);
- logger.info("Found player: {}", p);
- if (p != null) p.sendMessage(msg);
- }
- }.runTask(this); // schedule on the primary server thread rather than running inline
- }
-
- public Reputation getReputation() {
- return reputation;
- }
-}
\ No newline at end of file
diff --git a/lc-minecraft-link/src/main/java/lc/minecraft/link/PlayerNameProvider.java b/lc-minecraft-link/src/main/java/lc/minecraft/link/PlayerNameProvider.java
deleted file mode 100644
index e5dcc809517f1e0a9e4c514da6b511f33ff716b4..0000000000000000000000000000000000000000
--- a/lc-minecraft-link/src/main/java/lc/minecraft/link/PlayerNameProvider.java
+++ /dev/null
@@ -1,12 +0,0 @@
-package lc.minecraft.link;
-
-import java.util.UUID;
-
-/**
- * Classes implementing this interface are capable of converting a UUID playerId into a String playerName.
- *
- * @author Alex Leigh
- */
-public interface PlayerNameProvider {
- String getPlayerName(UUID playerId);
-}
diff --git a/lc-minecraft-link/src/main/java/lc/minecraft/link/bukkit/BukkitPlayerNameProvider.java b/lc-minecraft-link/src/main/java/lc/minecraft/link/bukkit/BukkitPlayerNameProvider.java
deleted file mode 100644
index 5f27e47930c8e4520904b8581a6bd0d7d8adbc99..0000000000000000000000000000000000000000
--- a/lc-minecraft-link/src/main/java/lc/minecraft/link/bukkit/BukkitPlayerNameProvider.java
+++ /dev/null
@@ -1,25 +0,0 @@
-package lc.minecraft.link.bukkit;
-
-import lc.minecraft.link.PlayerNameProvider;
-import org.bukkit.Bukkit;
-import org.bukkit.OfflinePlayer;
-
-import java.util.UUID;
-
-/**
- * Implementation of {@link PlayerNameProvider} backed by Bukkit.
- *
- * @author Alex Leigh
- */
-public class BukkitPlayerNameProvider implements PlayerNameProvider {
- /**
- * Return the name of the player. If the lookup fails for some reason (like that ID has never logged into
- * the server), then the ID is returned in string form.
- */
- @Override
- public String getPlayerName(UUID playerId) {
- OfflinePlayer p = Bukkit.getOfflinePlayer(playerId);
- if (p == null || p.getName() == null) return playerId.toString();
- return p.getName();
- }
-}
diff --git a/lc-minecraft-link/src/main/java/lc/minecraft/link/bukkit/BukkitReputationEventHandler.java b/lc-minecraft-link/src/main/java/lc/minecraft/link/bukkit/BukkitReputationEventHandler.java
deleted file mode 100644
index 368d8042e4e3c9ab8937ca514c173c7d9cdb5868..0000000000000000000000000000000000000000
--- a/lc-minecraft-link/src/main/java/lc/minecraft/link/bukkit/BukkitReputationEventHandler.java
+++ /dev/null
@@ -1,36 +0,0 @@
-package lc.minecraft.link.bukkit;
-
-import lc.minecraft.link.faction.ReputationEventHandler;
-import org.bukkit.Bukkit;
-import org.bukkit.entity.Player;
-
-import java.util.UUID;
-
-/**
- * Implementation of {@link ReputationEventHandler} for the Bukkit API. This implementation sends messages to the
- * player when their reputation changes.
- *
- * @author Alex Leigh
- */
-public class BukkitReputationEventHandler implements ReputationEventHandler {
- @Override
- public void reputationChanged(UUID playerId, String factionDisplayName, int delta) {
- Player p = Bukkit.getPlayer(playerId);
- if (p != null) {
- if (delta >= 0) {
- p.sendMessage("Your reputation with " + factionDisplayName + " has increased by " + delta + " points.");
- } else {
- int x = -delta;
- p.sendMessage("Your reputation with " + factionDisplayName + " has decreased by " + x + " points.");
- }
- }
- }
-
- @Override
- public void reputationLevelChanged(UUID playerId, String factionDisplayName, String factionLevel) {
- Player p = Bukkit.getPlayer(playerId);
- if (p != null) {
- p.sendMessage(factionDisplayName + " now consider you " + factionLevel + "!");
- }
- }
-}
diff --git a/lc-minecraft-link/src/main/java/lc/minecraft/link/faction/Faction.java b/lc-minecraft-link/src/main/java/lc/minecraft/link/faction/Faction.java
deleted file mode 100644
index ca08c212a2fecb84c99e2860aa0962f53794d26e..0000000000000000000000000000000000000000
--- a/lc-minecraft-link/src/main/java/lc/minecraft/link/faction/Faction.java
+++ /dev/null
@@ -1,22 +0,0 @@
-package lc.minecraft.link.faction;
-
-public class Faction {
- public final static int ID_LEIGHCO = 1;
- public final static int ID_ISO = 2;
- public final static int ID_RETURNER = 3;
- public final static int ID_ENDER = 4;
-
- public String getDisplayText(int fid) {
- switch (fid) {
- case ID_LEIGHCO:
- return "LEIGH&CO";
- case ID_ISO:
- return "Isolationists";
- case ID_RETURNER:
- return "Returners";
- case ID_ENDER:
- return "Ender-people";
- }
- return "Unknown";
- }
-}
diff --git a/lc-minecraft-link/src/main/java/lc/minecraft/link/faction/Reputation.java b/lc-minecraft-link/src/main/java/lc/minecraft/link/faction/Reputation.java
deleted file mode 100644
index 220cecd3fefb58902c977d0a14f2d92c22b728da..0000000000000000000000000000000000000000
--- a/lc-minecraft-link/src/main/java/lc/minecraft/link/faction/Reputation.java
+++ /dev/null
@@ -1,181 +0,0 @@
-package lc.minecraft.link.faction;
-
-import com.mchange.v2.c3p0.ComboPooledDataSource;
-import lc.mecha.cred.Credential;
-import lc.mecha.cred.CredentialConnection;
-import lc.mecha.log.MechaLogger;
-import lc.mecha.log.MechaLoggerFactory;
-
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.util.Properties;
-import java.util.UUID;
-
-/**
- * This class manages faction reputation. Reputation is stored in MySQL in the lc_faction and lc_reputation
- * tables. Reputation is associated with a given player via their UUID, and the tables are largely self
- * explanatory.
- *
- * @author Alex Leigh
- */
-public class Reputation {
- private static final MechaLogger logger = MechaLoggerFactory.getLogger(Reputation.class);
- private final ComboPooledDataSource cpds;
- private final ReputationEventHandler handler;
- private final Faction faction;
-
- public Reputation(CredentialConnection cred, ReputationEventHandler handler, Faction faction) {
- this.handler = handler;
- this.faction = faction;
- cpds = new ComboPooledDataSource();
- cpds.setJdbcUrl(cred.getConnectionString());
- cpds.setUser(cred.getUsername());
- cpds.setPassword(cred.getPassword());
- }
-
- public static void main(String[] args) throws SQLException, IOException {
- Properties p = new Properties();
- p.load(new FileInputStream("test.properties"));
-
- Reputation r = new Reputation(new CredentialConnection("jdbc:mysql://db1.leigh-co.com/neweden",
- new Credential("neweden", p.getProperty("db1_neweden_password"))), new ReputationEventHandler() {
- @Override
- public void reputationChanged(UUID playerId, String factionDisplayName, int delta) {
- logger.info("Reputation changed. [faction: {}] [delta: {}]", factionDisplayName, delta);
- }
-
- @Override
- public void reputationLevelChanged(UUID playerId, String factionDisplayName, String factionLevel) {
- logger.info("Reputation changed. [faction: {}] [level: {}]", factionDisplayName, factionLevel);
- }
- }, new Faction());
-
- UUID player = UUID.randomUUID();
-
- r.adjustReputation(player, Faction.ID_LEIGHCO, 0);
- r.adjustReputation(player, 1, 5000);
- r.adjustReputation(player, 1, 10);
- }
-
- /**
- * Return the reputation for the given player with the given faction. If there is no reputation record in
- * the database for this combination, 0 is returned instead.
- *
- * This is an expensive synchronous call.
- */
- public int getReputation(UUID playerId, int factionId) throws SQLException {
- try (Connection con = cpds.getConnection()) {
- try (PreparedStatement selectPs = con.prepareStatement("SELECT (standing) FROM lc_reputation WHERE player_uuid=? AND faction_id=?")) {
- selectPs.setString(1, playerId.toString());
- selectPs.setInt(2, factionId);
- try (ResultSet rs = selectPs.executeQuery()) {
- if (rs.next()) {
- return rs.getInt(1);
- } else {
- return 0;
- }
- }
- }
- }
- }
-
- /**
- * Increase or decrease standing with the given faction for the given player by the given amount. Do not
- * call this method from within a tick, as it will crater TPS or lead to server instability. Call
- * adjustReputationAsync() instead.
- *
- * This method will not perform world-level business logic.
- */
- private void adjustReputationSync(UUID playerId, int factionId, int amount) throws SQLException {
- // We do this to avoid sending messages while in the txn
- String levelChanged = null;
-
- try (Connection con = cpds.getConnection()) {
- con.setAutoCommit(false);
- try (PreparedStatement selectPs = con.prepareStatement("SELECT (standing) FROM lc_reputation WHERE player_uuid=? AND faction_id=?")) {
- selectPs.setString(1, playerId.toString());
- selectPs.setInt(2, factionId);
-
- try (ResultSet rs = selectPs.executeQuery()) {
- if (rs.next()) {
- int oldAmt = rs.getInt(1);
- int newAmt = oldAmt + amount;
-
- try (PreparedStatement updatePs = con.prepareStatement("UPDATE lc_reputation SET standing=? WHERE player_uuid=? AND faction_id=?")) {
- updatePs.setInt(1, newAmt);
- updatePs.setString(2, playerId.toString());
- updatePs.setInt(3, factionId);
- updatePs.execute();
- }
-
- String oldLevel = ReputationLevel.getDisplayText(oldAmt);
- String newLevel = ReputationLevel.getDisplayText(newAmt);
-
- if (!oldLevel.equals(newLevel)) {
- levelChanged = newLevel;
- }
- } else {
- try (PreparedStatement insertPs = con.prepareStatement("INSERT INTO lc_reputation (player_uuid,faction_id,standing) VALUES (?,?,?)")) {
- insertPs.setString(1, playerId.toString());
- insertPs.setInt(2, factionId);
- insertPs.setInt(3, amount);
- insertPs.execute();
- }
-
- // If they had no rep before, but now do, we always announce it.
- levelChanged = ReputationLevel.getDisplayText(amount);
- }
- }
- }
-
- con.commit();
- String factionName = faction.getDisplayText(factionId);
- handler.reputationChanged(playerId, factionName, amount);
- if (levelChanged != null) {
- handler.reputationLevelChanged(playerId, factionName, levelChanged);
- }
- }
- }
-
- /**
- * Increase or decrease standing with the given faction for the given player by the given amount. This method
- * is safe to call from within a tick.
- *
- * This method will not perform world-level business logic.
- */
- private void adjustReputationAsync(UUID playerId, int faction, int amount) {
- // TODO Actually use a worker pool here
-
- new Thread(() -> {
- try {
- adjustReputationSync(playerId, faction, amount);
- } catch (SQLException e) {
- e.printStackTrace();
- }
- }).start();
- }
-
- /**
- * Adjust the reputation for the player, and perform world-specific business rules.
- */
- public void adjustReputation(UUID playerId, int factionId, int amount) {
- adjustReputationAsync(playerId, factionId, amount);
-
- int x = -amount / 2;
-
- if (x != 0) {
- switch (factionId) {
- case Faction.ID_LEIGHCO:
- adjustReputationAsync(playerId, Faction.ID_RETURNER, x);
- break;
- case Faction.ID_RETURNER:
- adjustReputationAsync(playerId, Faction.ID_LEIGHCO, x);
- break;
- }
- }
- }
-}
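
The public entry point of the deleted `Reputation` class above is `adjustReputation(UUID, int, int)`, which schedules the database write asynchronously and then applies the world rule shown in the file: gaining standing with LEIGH&CO costs the Returners half that amount, and vice versa. Below is a minimal usage sketch under stated assumptions: a reachable MySQL instance with placeholder credentials and a console-logging event handler; the API calls themselves are the ones visible in the file above.

```java
import lc.mecha.cred.Credential;
import lc.mecha.cred.CredentialConnection;
import lc.minecraft.link.faction.Faction;
import lc.minecraft.link.faction.Reputation;
import lc.minecraft.link.faction.ReputationEventHandler;

import java.util.UUID;

public class ReputationUsageSketch {
    public static void main(String[] args) throws Exception {
        // Placeholder connection details; in the plugin these come from config.yml.
        Reputation reputation = new Reputation(
                new CredentialConnection("jdbc:mysql://localhost/neweden",
                        new Credential("user", "password")),
                new ReputationEventHandler() {
                    @Override
                    public void reputationChanged(UUID playerId, String faction, int delta) {
                        System.out.println(faction + " changed by " + delta);
                    }

                    @Override
                    public void reputationLevelChanged(UUID playerId, String faction, String level) {
                        System.out.println(faction + " now consider the player " + level);
                    }
                },
                new Faction());

        UUID player = UUID.randomUUID();

        // +500 with LEIGH&CO; the cross-faction rule in adjustReputation() then
        // adjusts the Returners by -(500 / 2) = -250 in the background.
        reputation.adjustReputation(player, Faction.ID_LEIGHCO, 500);

        // Reads are synchronous; the write above is asynchronous, so give it a moment to land.
        Thread.sleep(1_000);
        System.out.println("LEIGH&CO standing: " + reputation.getReputation(player, Faction.ID_LEIGHCO));
    }
}
```
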
diff --git a/lc-minecraft-link/src/main/java/lc/minecraft/link/faction/ReputationEventHandler.java b/lc-minecraft-link/src/main/java/lc/minecraft/link/faction/ReputationEventHandler.java
deleted file mode 100644
index 4f5c40e6bd046346a5fd0c21fe5d2929211acb7d..0000000000000000000000000000000000000000
--- a/lc-minecraft-link/src/main/java/lc/minecraft/link/faction/ReputationEventHandler.java
+++ /dev/null
@@ -1,15 +0,0 @@
-package lc.minecraft.link.faction;
-
-import java.util.UUID;
-
-/**
- * Classes implementing this interface can receive events when reputation changes occur for players.
- *
- * @author Alex Leigh
- */
-public interface ReputationEventHandler {
- void reputationChanged(UUID playerId, String factionDisplayName, int delta);
-
- void reputationLevelChanged(UUID playerId, String factionDisplayName, String factionLevel);
-}
diff --git a/lc-minecraft-link/src/main/java/lc/minecraft/link/faction/ReputationLevel.java b/lc-minecraft-link/src/main/java/lc/minecraft/link/faction/ReputationLevel.java
deleted file mode 100644
index 2ec75f6ebb899052d0ae70c91e9fe0c98f67502b..0000000000000000000000000000000000000000
--- a/lc-minecraft-link/src/main/java/lc/minecraft/link/faction/ReputationLevel.java
+++ /dev/null
@@ -1,61 +0,0 @@
-package lc.minecraft.link.faction;
-
-/**
- * A player's reputation level as an enum.
- *
- * @author Alex Leigh
- * @since mk2
- */
-public enum ReputationLevel {
- EXALTED, REVERED, HONORED, FRIENDLY, NEUTRAL, UNFRIENDLY, HOSTILE, HATED;
-
- public final static int LEVEL_EXALTED = 85000;
- public final static int LEVEL_REVERED = 84000;
- public final static int LEVEL_HONORED = 63000;
- public final static int LEVEL_FRIENDLY = 51000;
- public final static int LEVEL_NEUTRAL = 45000;
- public final static int LEVEL_UNFRIENDLY = 42000;
- public final static int LEVEL_HOSTILE = 39000;
- public final static int LEVEL_HATED = 36000;
-
- public static ReputationLevel getLevel(int reputationScore) {
- if (reputationScore >= LEVEL_EXALTED) return EXALTED;
- if (reputationScore >= LEVEL_REVERED) return REVERED;
- if (reputationScore >= LEVEL_HONORED) return HONORED;
- if (reputationScore >= LEVEL_FRIENDLY) return FRIENDLY;
- if (reputationScore >= LEVEL_NEUTRAL) return NEUTRAL;
- if (reputationScore >= LEVEL_UNFRIENDLY) return UNFRIENDLY;
- if (reputationScore >= LEVEL_HOSTILE) return HOSTILE;
- return HATED;
- }
-
- /**
- * Given a reputation score, return the display text for the score, such as "Exalted" or "Revered".
- *
- * @param reputationScore The score to evaluate.
- * @return The display text.
- */
- public static String getDisplayText(int reputationScore) {
- ReputationLevel level = getLevel(reputationScore);
- switch (level) {
- case EXALTED:
- return "Exalted";
- case REVERED:
- return "Revered";
- case HONORED:
- return "Honored";
- case FRIENDLY:
- return "Friendly";
- case NEUTRAL:
- return "Neutral";
- case UNFRIENDLY:
- return "Unfriendly";
- case HOSTILE:
- return "Hostile";
- case HATED:
- return "Hated";
- default:
- throw new IllegalStateException("Unknown reputation level");
- }
- }
-}
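
The thresholds above are checked from highest to lowest with `>=`, so a score at or above `LEVEL_EXALTED` is Exalted and anything below `LEVEL_HATED` still resolves to Hated. A small, self-contained check against the constants shown above:

```java
import lc.minecraft.link.faction.ReputationLevel;

public class ReputationLevelExample {
    public static void main(String[] args) {
        System.out.println(ReputationLevel.getLevel(90_000));        // EXALTED (>= 85,000)
        System.out.println(ReputationLevel.getDisplayText(45_000));  // Neutral (meets LEVEL_NEUTRAL exactly)
        System.out.println(ReputationLevel.getLevel(0));             // HATED (below every threshold)
    }
}
```
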
diff --git a/lc-minecraft-link/src/main/java/lc/minecraft/link/test/MockPlayerNameProvider.java b/lc-minecraft-link/src/main/java/lc/minecraft/link/test/MockPlayerNameProvider.java
deleted file mode 100644
index 11e35529a61261de9228718a0a2e0d0ce20a118e..0000000000000000000000000000000000000000
--- a/lc-minecraft-link/src/main/java/lc/minecraft/link/test/MockPlayerNameProvider.java
+++ /dev/null
@@ -1,44 +0,0 @@
-package lc.minecraft.link.test;
-
-import lc.minecraft.link.PlayerNameProvider;
-
-import java.util.Objects;
-import java.util.UUID;
-
-/**
- * Mock implementation of {@link PlayerNameProvider} which is useful for testing outside of the Bukkit environment.
- *
- * @author Alex Leigh
- */
-public class MockPlayerNameProvider implements PlayerNameProvider {
- private final String fakeName;
-
- public MockPlayerNameProvider(String fakeName) {
- this.fakeName = fakeName;
- }
-
- @Override
- public String getPlayerName(UUID playerId) {
- return fakeName;
- }
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- MockPlayerNameProvider that = (MockPlayerNameProvider) o;
- return Objects.equals(fakeName, that.fakeName);
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(fakeName);
- }
-
- @Override
- public String toString() {
- return "MockUsernameProvider{" +
- "fakeName='" + fakeName + '\'' +
- '}';
- }
-}
diff --git a/lc-minecraft-link/src/main/resources/config.yml b/lc-minecraft-link/src/main/resources/config.yml
deleted file mode 100644
index fb1c130958d839cd2f62049c3962219a1cb55a10..0000000000000000000000000000000000000000
--- a/lc-minecraft-link/src/main/resources/config.yml
+++ /dev/null
@@ -1,6 +0,0 @@
-# Intentionally Blank
-
-serverName: changeme
-reputationUrl: jdbc:mysql://db1.leigh-co.com/neweden
-reputationUser: neweden
-reputationPassword: abc123
diff --git a/lc-minecraft-link/src/main/resources/plugin.yml b/lc-minecraft-link/src/main/resources/plugin.yml
deleted file mode 100644
index da484fec080276f47c5840725d9e1038a99a4b9a..0000000000000000000000000000000000000000
--- a/lc-minecraft-link/src/main/resources/plugin.yml
+++ /dev/null
@@ -1,5 +0,0 @@
-name: lc-minecraft-link
-main: lc.minecraft.link.LinkPlugin
-version: 2.2
-api-version: 1.17
-depend: [ ]
\ No newline at end of file
diff --git a/lc-minecraft-terrain/.gitignore b/lc-minecraft-terrain/.gitignore
deleted file mode 100644
index ede7310f5aa22a8bb16f7393dedac5d9b0cba2c9..0000000000000000000000000000000000000000
--- a/lc-minecraft-terrain/.gitignore
+++ /dev/null
@@ -1,25 +0,0 @@
-kabe.log
-velocity.log
-.DS_Store
-statistic.xml
-.gradle
-/*/build/
-/build/
-gradle-app.setting
-!gradle-wrapper.jar
-.gradletasknamecache
-.idea/
-*.iml
-out/
-.cache
-node_modules
-/*/.idea
-.vi
-/*/graph.eo
-.vai
-graph.bak
-.idea
-tests.vai
-vds.log
-hosts.json
-tests.vai.old
\ No newline at end of file
diff --git a/lc-minecraft-terrain/LICENSE.md b/lc-minecraft-terrain/LICENSE.md
deleted file mode 100644
index 45a6272868993549245a703bf2a3445014e32577..0000000000000000000000000000000000000000
--- a/lc-minecraft-terrain/LICENSE.md
+++ /dev/null
@@ -1,492 +0,0 @@
-# GNU GENERAL PUBLIC LICENSE
-
-Version 3, 29 June 2007
-
-Copyright (C) 2007 [Free Software Foundation, Inc.](http://fsf.org/)
-
-Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed.
-
-## Preamble
-
-The GNU General Public License is a free, copyleft license for software and other kinds of works.
-
-The licenses for most software and other practical works are designed to take away your freedom to share and change the
-works. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change all
-versions of a program--to make sure it remains free software for all its users. We, the Free Software Foundation, use
-the GNU General Public License for most of our software; it applies also to any other work released this way by its
-authors. You can apply it to your programs, too.
-
-When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make
-sure that you have the freedom to distribute copies of free software (and charge for them if you wish), that you receive
-source code or can get it if you want it, that you can change the software or use pieces of it in new free programs, and
-that you know you can do these things.
-
-To protect your rights, we need to prevent others from denying you these rights or asking you to surrender the rights.
-Therefore, you have certain responsibilities if you distribute copies of the software, or if you modify it:
-responsibilities to respect the freedom of others.
-
-For example, if you distribute copies of such a program, whether gratis or for a fee, you must pass on to the recipients
-the same freedoms that you received. You must make sure that they, too, receive or can get the source code. And you must
-show them these terms so they know their rights.
-
-Developers that use the GNU GPL protect your rights with two steps:
-
-1. assert copyright on the software, and
-2. offer you this License giving you legal permission to copy, distribute and/or modify it.
-
-For the developers' and authors' protection, the GPL clearly explains that there is no warranty for this free software.
-For both users' and authors' sake, the GPL requires that modified versions be marked as changed, so that their problems
-will not be attributed erroneously to authors of previous versions.
-
-Some devices are designed to deny users access to install or run modified versions of the software inside them, although
-the manufacturer can do so. This is fundamentally incompatible with the aim of protecting users' freedom to change the
-software. The systematic pattern of such abuse occurs in the area of products for individuals to use, which is precisely
-where it is most unacceptable. Therefore, we have designed this version of the GPL to prohibit the practice for those
-products. If such problems arise substantially in other domains, we stand ready to extend this provision to those
-domains in future versions of the GPL, as needed to protect the freedom of users.
-
-Finally, every program is threatened constantly by software patents. States should not allow patents to restrict
-development and use of software on general-purpose computers, but in those that do, we wish to avoid the special danger
-that patents applied to a free program could make it effectively proprietary. To prevent this, the GPL assures that
-patents cannot be used to render the program non-free.
-
-The precise terms and conditions for copying, distribution and modification follow.
-
-## TERMS AND CONDITIONS
-
-### 0. Definitions.
-
-*This License* refers to version 3 of the GNU General Public License.
-
-*Copyright* also means copyright-like laws that apply to other kinds of works, such as semiconductor masks.
-
-*The Program* refers to any copyrightable work licensed under this License. Each licensee is addressed as *you*.
-*Licensees* and *recipients* may be individuals or organizations.
-
-To *modify* a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission,
-other than the making of an exact copy. The resulting work is called a *modified version* of the earlier work or a work
-*based on* the earlier work.
-
-A *covered work* means either the unmodified Program or a work based on the Program.
-
-To *propagate* a work means to do anything with it that, without permission, would make you directly or secondarily
-liable for infringement under applicable copyright law, except executing it on a computer or modifying a private copy.
-Propagation includes copying, distribution (with or without modification), making available to the public, and in some
-countries other activities as well.
-
-To *convey* a work means any kind of propagation that enables other parties to make or receive copies. Mere interaction
-with a user through a computer network, with no transfer of a copy, is not conveying.
-
-An interactive user interface displays *Appropriate Legal Notices* to the extent that it includes a convenient and
-prominently visible feature that
-
-1. displays an appropriate copyright notice, and
-2. tells the user that there is no warranty for the work (except to the extent that warranties are provided), that
- licensees may convey the work under this License, and how to view a copy of this License.
-
-If the interface presents a list of user commands or options, such as a menu, a prominent item in the list meets this
-criterion.
-
-### 1. Source Code.
-
-The *source code* for a work means the preferred form of the work for making modifications to it. *Object code* means
-any non-source form of a work.
-
-A *Standard Interface* means an interface that either is an official standard defined by a recognized standards body,
-or, in the case of interfaces specified for a particular programming language, one that is widely used among developers
-working in that language.
-
-The *System Libraries* of an executable work include anything, other than the work as a whole, that (a) is included in
-the normal form of packaging a Major Component, but which is not part of that Major Component, and (b) serves only to
-enable use of the work with that Major Component, or to implement a Standard Interface for which an implementation is
-available to the public in source code form. A *Major Component*, in this context, means a major essential component
-(kernel, window system, and so on) of the specific operating system (if any) on which the executable work runs, or a
-compiler used to produce the work, or an object code interpreter used to run it.
-
-The *Corresponding Source* for a work in object code form means all the source code needed to generate, install, and (
-for an executable work) run the object code and to modify the work, including scripts to control those activities.
-However, it does not include the work's System Libraries, or general-purpose tools or generally available free programs
-which are used unmodified in performing those activities but which are not part of the work. For example, Corresponding
-Source includes interface definition files associated with source files for the work, and the source code for shared
-libraries and dynamically linked subprograms that the work is specifically designed to require, such as by intimate data
-communication or control flow between those subprograms and other parts of the work.
-
-The Corresponding Source need not include anything that users can regenerate automatically from other parts of the
-Corresponding Source.
-
-The Corresponding Source for a work in source code form is that same work.
-
-### 2. Basic Permissions.
-
-All rights granted under this License are granted for the term of copyright on the Program, and are irrevocable provided
-the stated conditions are met. This License explicitly affirms your unlimited permission to run the unmodified Program.
-The output from running a covered work is covered by this License only if the output, given its content, constitutes a
-covered work. This License acknowledges your rights of fair use or other equivalent, as provided by copyright law.
-
-You may make, run and propagate covered works that you do not convey, without conditions so long as your license
-otherwise remains in force. You may convey covered works to others for the sole purpose of having them make
-modifications exclusively for you, or provide you with facilities for running those works, provided that you comply with
-the terms of this License in conveying all material for which you do not control copyright. Those thus making or running
-the covered works for you must do so exclusively on your behalf, under your direction and control, on terms that
-prohibit them from making any copies of your copyrighted material outside their relationship with you.
-
-Conveying under any other circumstances is permitted solely under the conditions stated below. Sublicensing is not
-allowed; section 10 makes it unnecessary.
-
-### 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
-
-No covered work shall be deemed part of an effective technological measure under any applicable law fulfilling
-obligations under article 11 of the WIPO copyright treaty adopted on 20 December 1996, or similar laws prohibiting or
-restricting circumvention of such measures.
-
-When you convey a covered work, you waive any legal power to forbid circumvention of technological measures to the
-extent such circumvention is effected by exercising rights under this License with respect to the covered work, and you
-disclaim any intention to limit operation or modification of the work as a means of enforcing, against the work's users,
-your or third parties' legal rights to forbid circumvention of technological measures.
-
-### 4. Conveying Verbatim Copies.
-
-You may convey verbatim copies of the Program's source code as you receive it, in any medium, provided that you
-conspicuously and appropriately publish on each copy an appropriate copyright notice; keep intact all notices stating
-that this License and any non-permissive terms added in accord with section 7 apply to the code; keep intact all notices
-of the absence of any warranty; and give all recipients a copy of this License along with the Program.
-
-You may charge any price or no price for each copy that you convey, and you may offer support or warranty protection for
-a fee.
-
-### 5. Conveying Modified Source Versions.
-
-You may convey a work based on the Program, or the modifications to produce it from the Program, in the form of source
-code under the terms of section 4, provided that you also meet all of these conditions:
-
-- a) The work must carry prominent notices stating that you modified it, and giving a relevant date.
-- b) The work must carry prominent notices stating that it is released under this License and any conditions added under
- section 7. This requirement modifies the requirement in section 4 to *keep intact all notices*.
-- c) You must license the entire work, as a whole, under this License to anyone who comes into possession of a copy.
- This License will therefore apply, along with any applicable section 7 additional terms, to the whole of the work, and
- all its parts, regardless of how they are packaged. This License gives no permission to license the work in any other
- way, but it does not invalidate such permission if you have separately received it.
-- d) If the work has interactive user interfaces, each must display Appropriate Legal Notices; however, if the Program
- has interactive interfaces that do not display Appropriate Legal Notices, your work need not make them do so.
-
-A compilation of a covered work with other separate and independent works, which are not by their nature extensions of
-the covered work, and which are not combined with it such as to form a larger program, in or on a volume of a storage or
-distribution medium, is called an *aggregate* if the compilation and its resulting copyright are not used to limit the
-access or legal rights of the compilation's users beyond what the individual works permit. Inclusion of a covered work
-in an aggregate does not cause this License to apply to the other parts of the aggregate.
-
-### 6. Conveying Non-Source Forms.
-
-You may convey a covered work in object code form under the terms of sections 4 and 5, provided that you also convey the
-machine-readable Corresponding Source under the terms of this License, in one of these ways:
-
-- a) Convey the object code in, or embodied in, a physical product (including a physical distribution medium),
- accompanied by the Corresponding Source fixed on a durable physical medium customarily used for software interchange.
-- b) Convey the object code in, or embodied in, a physical product (including a physical distribution medium),
- accompanied by a written offer, valid for at least three years and valid for as long as you offer spare parts or
- customer support for that product model, to give anyone who possesses the object code either
-
-1. a copy of the Corresponding Source for all the software in the product that is covered by this License, on a durable
- physical medium customarily used for software interchange, for a price no more than your reasonable cost of
- physically performing this conveying of source, or
-2. access to copy the Corresponding Source from a network server at no charge.
-
-- c) Convey individual copies of the object code with a copy of the written offer to provide the Corresponding Source.
- This alternative is allowed only occasionally and noncommercially, and only if you received the object code with such
- an offer, in accord with subsection 6b.
-- d) Convey the object code by offering access from a designated place
- (gratis or for a charge), and offer equivalent access to the Corresponding Source in the same way through the same
- place at no further charge. You need not require recipients to copy the Corresponding Source along with the object
- code. If the place to copy the object code is a network server, the Corresponding Source may be on a different server
- (operated by you or a third party) that supports equivalent copying facilities, provided you maintain clear directions
- next to the object code saying where to find the Corresponding Source. Regardless of what server hosts the
- Corresponding Source, you remain obligated to ensure that it is available for as long as needed to satisfy these
- requirements.
-- e) Convey the object code using peer-to-peer transmission, provided you inform other peers where the object code and
- Corresponding Source of the work are being offered to the general public at no charge under subsection 6d.
-
-A separable portion of the object code, whose source code is excluded from the Corresponding Source as a System Library,
-need not be included in conveying the object code work.
-
-A *User Product* is either
-
-1. a *consumer product*, which means any tangible personal property which is normally used for personal, family, or
- household purposes, or
-2. anything designed or sold for incorporation into a dwelling.
-
-In determining whether a product is a consumer product, doubtful cases shall be resolved in favor of coverage. For a
-particular product received by a particular user, *normally used* refers to a typical or common use of that class of
-product, regardless of the status of the particular user or of the way in which the particular user actually uses, or
-expects or is expected to use, the product. A product is a consumer product regardless of whether the product has
-substantial commercial, industrial or non-consumer uses, unless such uses represent the only significant mode of use of
-the product.
-
-*Installation Information* for a User Product means any methods, procedures, authorization keys, or other information
-required to install and execute modified versions of a covered work in that User Product from a modified version of its
-Corresponding Source. The information must suffice to ensure that the continued functioning of the modified object code
-is in no case prevented or interfered with solely because modification has been made.
-
-If you convey an object code work under this section in, or with, or specifically for use in, a User Product, and the
-conveying occurs as part of a transaction in which the right of possession and use of the User Product is transferred to
-the recipient in perpetuity or for a fixed term (regardless of how the transaction is characterized), the Corresponding
-Source conveyed under this section must be accompanied by the Installation Information. But this requirement does not
-apply if neither you nor any third party retains the ability to install modified object code on the User Product (for
-example, the work has been installed in ROM).
-
-The requirement to provide Installation Information does not include a requirement to continue to provide support
-service, warranty, or updates for a work that has been modified or installed by the recipient, or for the User Product
-in which it has been modified or installed. Access to a network may be denied when the modification itself materially
-and adversely affects the operation of the network or violates the rules and protocols for communication across the
-network.
-
-Corresponding Source conveyed, and Installation Information provided, in accord with this section must be in a format
-that is publicly documented (and with an implementation available to the public in source code form), and must require
-no special password or key for unpacking, reading or copying.
-
-### 7. Additional Terms.
-
-*Additional permissions* are terms that supplement the terms of this License by making exceptions from one or more of
-its conditions. Additional permissions that are applicable to the entire Program shall be treated as though they were
-included in this License, to the extent that they are valid under applicable law. If additional permissions apply only
-to part of the Program, that part may be used separately under those permissions, but the entire Program remains
-governed by this License without regard to the additional permissions.
-
-When you convey a copy of a covered work, you may at your option remove any additional permissions from that copy, or
-from any part of it. (Additional permissions may be written to require their own removal in certain cases when you
-modify the work.) You may place additional permissions on material, added by you to a covered work, for which you have
-or can give appropriate copyright permission.
-
-Notwithstanding any other provision of this License, for material you add to a covered work, you may (if authorized by
-the copyright holders of that material)
-supplement the terms of this License with terms:
-
-- a) Disclaiming warranty or limiting liability differently from the terms of sections 15 and 16 of this License; or
-- b) Requiring preservation of specified reasonable legal notices or author attributions in that material or in the
- Appropriate Legal Notices displayed by works containing it; or
-- c) Prohibiting misrepresentation of the origin of that material, or requiring that modified versions of such material
- be marked in reasonable ways as different from the original version; or
-- d) Limiting the use for publicity purposes of names of licensors or authors of the material; or
-- e) Declining to grant rights under trademark law for use of some trade names, trademarks, or service marks; or
-- f) Requiring indemnification of licensors and authors of that material by anyone who conveys the material (or modified
- versions of it) with contractual assumptions of liability to the recipient, for any liability that these contractual
- assumptions directly impose on those licensors and authors.
-
-All other non-permissive additional terms are considered *further restrictions*
-within the meaning of section 10. If the Program as you received it, or any part of it, contains a notice stating that
-it is governed by this License along with a term that is a further restriction, you may remove that term. If a license
-document contains a further restriction but permits relicensing or conveying under this License, you may add to a
-covered work material governed by the terms of that license document, provided that the further restriction does not
-survive such relicensing or conveying.
-
-If you add terms to a covered work in accord with this section, you must place, in the relevant source files, a
-statement of the additional terms that apply to those files, or a notice indicating where to find the applicable terms.
-
-Additional terms, permissive or non-permissive, may be stated in the form of a separately written license, or stated as
-exceptions; the above requirements apply either way.
-
-### 8. Termination.
-
-You may not propagate or modify a covered work except as expressly provided under this License. Any attempt otherwise to
-propagate or modify it is void, and will automatically terminate your rights under this License (including any patent
-licenses granted under the third paragraph of section 11).
-
-However, if you cease all violation of this License, then your license from a particular copyright holder is reinstated
-
-- a) provisionally, unless and until the copyright holder explicitly and finally terminates your license, and
-- b) permanently, if the copyright holder fails to notify you of the violation by some reasonable means prior to 60 days
- after the cessation.
-
-Moreover, your license from a particular copyright holder is reinstated permanently if the copyright holder notifies you
-of the violation by some reasonable means, this is the first time you have received notice of violation of this
-License (for any work) from that copyright holder, and you cure the violation prior to 30 days after your receipt of the
-notice.
-
-Termination of your rights under this section does not terminate the licenses of parties who have received copies or
-rights from you under this License. If your rights have been terminated and not permanently reinstated, you do not
-qualify to receive new licenses for the same material under section 10.
-
-### 9. Acceptance Not Required for Having Copies.
-
-You are not required to accept this License in order to receive or run a copy of the Program. Ancillary propagation of a
-covered work occurring solely as a consequence of using peer-to-peer transmission to receive a copy likewise does not
-require acceptance. However, nothing other than this License grants you permission to propagate or modify any covered
-work. These actions infringe copyright if you do not accept this License. Therefore, by modifying or propagating a
-covered work, you indicate your acceptance of this License to do so.
-
-### 10. Automatic Licensing of Downstream Recipients.
-
-Each time you convey a covered work, the recipient automatically receives a license from the original licensors, to run,
-modify and propagate that work, subject to this License. You are not responsible for enforcing compliance by third
-parties with this License.
-
-An *entity transaction* is a transaction transferring control of an organization, or substantially all assets of one, or
-subdividing an organization, or merging organizations. If propagation of a covered work results from an entity
-transaction, each party to that transaction who receives a copy of the work also receives whatever licenses to the work
-the party's predecessor in interest had or could give under the previous paragraph, plus a right to possession of the
-Corresponding Source of the work from the predecessor in interest, if the predecessor has it or can get it with
-reasonable efforts.
-
-You may not impose any further restrictions on the exercise of the rights granted or affirmed under this License. For
-example, you may not impose a license fee, royalty, or other charge for exercise of rights granted under this License,
-and you may not initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging that any patent
-claim is infringed by making, using, selling, offering for sale, or importing the Program or any portion of it.
-
-### 11. Patents.
-
-A *contributor* is a copyright holder who authorizes use under this License of the Program or a work on which the
-Program is based. The work thus licensed is called the contributor's *contributor version*.
-
-A contributor's *essential patent claims* are all patent claims owned or controlled by the contributor, whether already
-acquired or hereafter acquired, that would be infringed by some manner, permitted by this License, of making, using, or
-selling its contributor version, but do not include claims that would be infringed only as a consequence of further
-modification of the contributor version. For purposes of this definition, *control* includes the right to grant patent
-sublicenses in a manner consistent with the requirements of this License.
-
-Each contributor grants you a non-exclusive, worldwide, royalty-free patent license under the contributor's essential
-patent claims, to make, use, sell, offer for sale, import and otherwise run, modify and propagate the contents of its
-contributor version.
-
-In the following three paragraphs, a *patent license* is any express agreement or commitment, however denominated, not
-to enforce a patent (such as an express permission to practice a patent or covenant not to sue for patent infringement).
-To *grant* such a patent license to a party means to make such an agreement or commitment not to enforce a patent
-against the party.
-
-If you convey a covered work, knowingly relying on a patent license, and the Corresponding Source of the work is not
-available for anyone to copy, free of charge and under the terms of this License, through a publicly available network
-server or other readily accessible means, then you must either
-
-1. cause the Corresponding Source to be so available, or
-2. arrange to deprive yourself of the benefit of the patent license for this particular work, or
-3. arrange, in a manner consistent with the requirements of this License, to extend the patent license to downstream
- recipients.
-
-*Knowingly relying* means you have actual knowledge that, but for the patent license, your conveying the covered work in
-a country, or your recipient's use of the covered work in a country, would infringe one or more identifiable patents in
-that country that you have reason to believe are valid.
-
-If, pursuant to or in connection with a single transaction or arrangement, you convey, or propagate by procuring
-conveyance of, a covered work, and grant a patent license to some of the parties receiving the covered work authorizing
-them to use, propagate, modify or convey a specific copy of the covered work, then the patent license you grant is
-automatically extended to all recipients of the covered work and works based on it.
-
-A patent license is *discriminatory* if it does not include within the scope of its coverage, prohibits the exercise of,
-or is conditioned on the non-exercise of one or more of the rights that are specifically granted under this License. You
-may not convey a covered work if you are a party to an arrangement with a third party that is in the business of
-distributing software, under which you make payment to the third party based on the extent of your activity of conveying
-the work, and under which the third party grants, to any of the parties who would receive the covered work from you, a
-discriminatory patent license
-
-- a) in connection with copies of the covered work conveyed by you (or copies made from those copies), or
-- b) primarily for and in connection with specific products or compilations that contain the covered work, unless you
- entered into that arrangement, or that patent license was granted, prior to 28 March 2007.
-
-Nothing in this License shall be construed as excluding or limiting any implied license or other defenses to
-infringement that may otherwise be available to you under applicable patent law.
-
-### 12. No Surrender of Others' Freedom.
-
-If conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this
-License, they do not excuse you from the conditions of this License. If you cannot convey a covered work so as to
-satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence
-you may not convey it at all. For example, if you agree to terms that obligate you to collect a royalty for further
-conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License
-would be to refrain entirely from conveying the Program.
-
-### 13. Use with the GNU Affero General Public License.
-
-Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work
-licensed under version 3 of the GNU Affero General Public License into a single combined work, and to convey the
-resulting work. The terms of this License will continue to apply to the part which is the covered work, but the special
-requirements of the GNU Affero General Public License, section 13, concerning interaction through a network will apply
-to the combination as such.
-
-### 14. Revised Versions of this License.
-
-The Free Software Foundation may publish revised and/or new versions of the GNU General Public License from time to
-time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new
-problems or concerns.
-
-Each version is given a distinguishing version number. If the Program specifies that a certain numbered version of the
-GNU General Public License *or any later version* applies to it, you have the option of following the terms and
-conditions either of that numbered version or of any later version published by the Free Software Foundation. If the
-Program does not specify a version number of the GNU General Public License, you may choose any version ever published
-by the Free Software Foundation.
-
-If the Program specifies that a proxy can decide which future versions of the GNU General Public License can be used,
-that proxy's public statement of acceptance of a version permanently authorizes you to choose that version for the
-Program.
-
-Later license versions may give you additional or different permissions. However, no additional obligations are imposed
-on any author or copyright holder as a result of your choosing to follow a later version.
-
-### 15. Disclaimer of Warranty.
-
-THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING
-THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM *AS IS* WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR
-IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
-THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU
-ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
-
-### 16. Limitation of Liability.
-
-IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO
-MODIFIES AND/OR CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL,
-INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO
-LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM
-TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
-DAMAGES.
-
-### 17. Interpretation of Sections 15 and 16.
-
-If the disclaimer of warranty and limitation of liability provided above cannot be given local legal effect according to
-their terms, reviewing courts shall apply local law that most closely approximates an absolute waiver of all civil
-liability in connection with the Program, unless a warranty or assumption of liability accompanies a copy of the Program
-in return for a fee.
-
-## END OF TERMS AND CONDITIONS
-
-### How to Apply These Terms to Your New Programs
-
-If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve
-this is to make it free software which everyone can redistribute and change under these terms.
-
-To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to
-most effectively state the exclusion of warranty; and each file should have at least the *copyright* line and a pointer
-to where the full notice is found.
-
-
- Copyright (C)
-
- This program is free software: you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation, either version 3 of the License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License
- along with this program. If not, see http://www.gnu.org/licenses/.
-
-Also add information on how to contact you by electronic and paper mail.
-
-If the program does terminal interaction, make it output a short notice like this when it starts in an interactive mode:
-
- Copyright (C)
- This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
- This is free software, and you are welcome to redistribute it
- under certain conditions; type `show c' for details.
-
-The hypothetical commands `show w` and `show c` should show the appropriate parts of the General Public License. Of
-course, your program's commands might be different; for a GUI interface, you would use an *about box*.
-
-You should also get your employer (if you work as a programmer) or school, if any, to sign a *copyright disclaimer* for
-the program, if necessary. For more information on this, and how to apply and follow the GNU GPL, see
-[http://www.gnu.org/licenses/](http://www.gnu.org/licenses/).
-
-The GNU General Public License does not permit incorporating your program into proprietary programs. If your program is
-a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If
-this is what you want to do, use the GNU Lesser General Public License instead of this License. But first, please read
-[http://www.gnu.org/philosophy/why-not-lgpl.html](http://www.gnu.org/philosophy/why-not-lgpl.html).
diff --git a/lc-minecraft-terrain/README.md b/lc-minecraft-terrain/README.md
deleted file mode 100644
index 4d2c5b872467af57371b79bd00374059a0ebebe4..0000000000000000000000000000000000000000
--- a/lc-minecraft-terrain/README.md
+++ /dev/null
@@ -1,2 +0,0 @@
-This module contains a prototype terrain generator for Minecraft that can replace vanilla-generated blocks
-with other block types.
\ No newline at end of file
diff --git a/lc-minecraft-terrain/build.gradle b/lc-minecraft-terrain/build.gradle
deleted file mode 100644
index 721a4bdc63ffd2436eefb905b2b4eab3f9b36feb..0000000000000000000000000000000000000000
--- a/lc-minecraft-terrain/build.gradle
+++ /dev/null
@@ -1,44 +0,0 @@
-plugins {
- id 'java'
- id 'java-library'
- id 'application'
-}
-
-// Version also located in plugin.yml!
-group 'leigh'
-version '1.0'
-
-sourceCompatibility = JavaVersion.VERSION_17
-targetCompatibility = JavaVersion.VERSION_17
-
-repositories {
- mavenCentral()
- mavenLocal()
- // Bukkit
- maven {
- url = 'https://hub.spigotmc.org/nexus/content/repositories/snapshots/'
- content {
- includeGroup 'org.bukkit'
- includeGroup 'org.spigotmc'
- }
- }
-}
-
-dependencies {
- implementation 'org.jetbrains:annotations:20.1.0'
- testImplementation 'org.junit.jupiter:junit-jupiter-api:5.7.0'
- testRuntimeOnly 'org.junit.jupiter:junit-jupiter-engine:5.7.0'
- compileOnly 'org.spigotmc:spigot-api:1.19-R0.1-SNAPSHOT'
- implementation project(':lc-esp-sdk')
-}
-
-test {
- useJUnitPlatform()
-}
-
-jar {
- duplicatesStrategy = DuplicatesStrategy.INCLUDE
- from {
- configurations.runtimeClasspath.collect { it.isDirectory() ? it : zipTree(it) }
- }
-}
diff --git a/lc-minecraft-terrain/src/main/java/lc/minecraft/terrain/GridPopulator.java b/lc-minecraft-terrain/src/main/java/lc/minecraft/terrain/GridPopulator.java
deleted file mode 100644
index 9ac814b1ceb6493db75049cf6740ec8b495a1c39..0000000000000000000000000000000000000000
--- a/lc-minecraft-terrain/src/main/java/lc/minecraft/terrain/GridPopulator.java
+++ /dev/null
@@ -1,14 +0,0 @@
-package lc.minecraft.terrain;
-
-import org.bukkit.generator.BlockPopulator;
-import org.bukkit.generator.LimitedRegion;
-import org.bukkit.generator.WorldInfo;
-
-import java.util.Random;
-
-public class GridPopulator extends BlockPopulator {
- @Override
- public void populate(WorldInfo worldInfo, Random random, int chunkX, int chunkZ, LimitedRegion limitedRegion) {
- // Intentionally empty: grid-based block placement has not been implemented yet.
- }
-}
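
The populator above is an empty stub. As a rough illustration of how the 1.17.1+ `BlockPopulator` API is typically used, here is a hypothetical sketch (not part of this patch) that paints a sparse grid of marker blocks into newly generated chunks; the class name, grid spacing, height, and choice of `SEA_LANTERN` are all illustrative assumptions.

```java
// Hypothetical sketch only: one way the populate() stub above could place
// blocks. Grid spacing, height, and material are illustrative assumptions.
import org.bukkit.Material;
import org.bukkit.generator.BlockPopulator;
import org.bukkit.generator.LimitedRegion;
import org.bukkit.generator.WorldInfo;

import java.util.Random;

public class GridPopulatorSketch extends BlockPopulator {
    @Override
    public void populate(WorldInfo worldInfo, Random random, int chunkX, int chunkZ, LimitedRegion region) {
        int baseX = chunkX << 4; // convert chunk coordinates to block coordinates
        int baseZ = chunkZ << 4;
        int y = worldInfo.getMinHeight() + 64; // arbitrary illustrative height
        for (int x = 0; x < 16; x += 4) {
            for (int z = 0; z < 16; z += 4) {
                if (region.isInRegion(baseX + x, y, baseZ + z)) {
                    region.setType(baseX + x, y, baseZ + z, Material.SEA_LANTERN);
                }
            }
        }
    }
}
```

A populator like this only takes effect once registered (for example from a ChunkGenerator's `getDefaultPopulators()`), which the plugin in this module does not currently do.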
diff --git a/lc-minecraft-terrain/src/main/java/lc/minecraft/terrain/TerrainPlugin.java b/lc-minecraft-terrain/src/main/java/lc/minecraft/terrain/TerrainPlugin.java
deleted file mode 100644
index 505f7a6e5a8c9c3c1a36c2cdb0dbcbabdc68332c..0000000000000000000000000000000000000000
--- a/lc-minecraft-terrain/src/main/java/lc/minecraft/terrain/TerrainPlugin.java
+++ /dev/null
@@ -1,56 +0,0 @@
-package lc.minecraft.terrain;
-
-import org.bukkit.Bukkit;
-import org.bukkit.Chunk;
-import org.bukkit.Material;
-import org.bukkit.block.Block;
-import org.bukkit.event.EventHandler;
-import org.bukkit.event.Listener;
-import org.bukkit.event.world.ChunkLoadEvent;
-import org.bukkit.plugin.java.JavaPlugin;
-
-public class TerrainPlugin extends JavaPlugin implements Listener {
- @Override
- public void onEnable() {
- getServer().getPluginManager().registerEvents(this, this);
- Bukkit.broadcastMessage("Minecraft Terrain mk1");
- }
-
- @Override
- public void onDisable() {
-
- }
-
- @EventHandler
- public void onChunkLoad(ChunkLoadEvent e) {
- if (e.isNewChunk()) {
- Chunk chunk = e.getChunk();
- Block b;
-
- for (int y = -64; y < 320; y++) { // valid block Y range in a 1.19 world is -64..319
- for (int x = 0; x < 16; x++) {
- for (int z = 0; z < 16; z++) {
- b = chunk.getBlock(x, y, z);
- String typ = b.getType().toString();
- if (!b.getType().isAir()
- && b.getType() != Material.BEDROCK
- && b.getType() != Material.WATER
- && b.getType() != Material.LAVA
- && b.getType() != Material.END_PORTAL_FRAME
- && b.getType() != Material.END_PORTAL
- && !typ.contains("DOOR")
- && !typ.contains("SNOW")
- && !typ.contains("ORE")
- && !typ.contains("ICE")) {
- if (typ.contains("LEAVES")) {
- b.setType(Material.SEA_LANTERN, false);
- } else {
- b.setType(Material.GRAY_CONCRETE);
- }
- }
- }
- }
- }
- }
- }
-}
\ No newline at end of file
diff --git a/lc-minecraft-terrain/src/main/resources/config.yml b/lc-minecraft-terrain/src/main/resources/config.yml
deleted file mode 100644
index 45324a25d0a83ea63b7f45cc53ff666c5a644fda..0000000000000000000000000000000000000000
--- a/lc-minecraft-terrain/src/main/resources/config.yml
+++ /dev/null
@@ -1,3 +0,0 @@
-# Intentionally Blank
-
-serverName: changeme
diff --git a/lc-minecraft-terrain/src/main/resources/plugin.yml b/lc-minecraft-terrain/src/main/resources/plugin.yml
deleted file mode 100644
index 4d4a6f5872500cfd1d5cf8e5eca344284b148345..0000000000000000000000000000000000000000
--- a/lc-minecraft-terrain/src/main/resources/plugin.yml
+++ /dev/null
@@ -1,5 +0,0 @@
-name: lc-minecraft-terrain
-main: lc.minecraft.terrain.TerrainPlugin
-version: 1.0
-api-version: 1.19
-depend: [ ]
\ No newline at end of file
diff --git a/lc-telemetry-faketsdb-svc/.gitignore b/lc-telemetry-faketsdb-svc/.gitignore
deleted file mode 100644
index 94143827ed065ca0d7d5be1b765d255c5c32cd9a..0000000000000000000000000000000000000000
--- a/lc-telemetry-faketsdb-svc/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-Dockerfile
diff --git a/lc-telemetry-faketsdb-svc/build.gradle b/lc-telemetry-faketsdb-svc/build.gradle
deleted file mode 100644
index b21bc40caf6176f4fc268731f6723b4ad85926ce..0000000000000000000000000000000000000000
--- a/lc-telemetry-faketsdb-svc/build.gradle
+++ /dev/null
@@ -1,49 +0,0 @@
-plugins {
- id 'java'
- id 'application'
- id "com.palantir.docker" version "0.33.0"
-}
-
-group 'leighco'
-version '1.3'
-
-sourceCompatibility = JavaVersion.VERSION_17
-targetCompatibility = JavaVersion.VERSION_17
-
-repositories {
- mavenCentral()
-}
-
-dependencies {
- implementation project (':lc-esp-sdk')
- implementation project (':lc-mecha-http-server')
- testImplementation 'org.junit.jupiter:junit-jupiter-api:5.7.0'
- testRuntimeOnly 'org.junit.jupiter:junit-jupiter-engine:5.7.0'
-}
-
-test {
- useJUnitPlatform()
-}
-
-application {
- mainClass = 'lc.telemetry.faketsdb.service.FakeTSDBService'
-}
-
-docker {
- name "docker.leigh-co.com/${project.name}:${project.version}"
- files tasks.distTar.outputs
-}
-
-task dockerfile() {
- doLast {
- new File(projectDir, 'Dockerfile').text = """FROM docker.leigh-co.com/openjdk:17-alpine
-EXPOSE 52018
-RUN mkdir /app
-COPY ${project.name}-${project.version}.tar /dist.tar
-RUN tar xfv /dist.tar
-RUN rm /dist.tar
-ENTRYPOINT ["/${project.name}-${project.version}/bin/${project.name}"]
-"""
- }
-}
-
diff --git a/lc-telemetry-faketsdb-svc/src/main/java/lc/telemetry/faketsdb/service/FakeTSDBService.java b/lc-telemetry-faketsdb-svc/src/main/java/lc/telemetry/faketsdb/service/FakeTSDBService.java
deleted file mode 100644
index 7a87579c6a390631c0cd3a343cc566f9e0dcfe12..0000000000000000000000000000000000000000
--- a/lc-telemetry-faketsdb-svc/src/main/java/lc/telemetry/faketsdb/service/FakeTSDBService.java
+++ /dev/null
@@ -1,39 +0,0 @@
-package lc.telemetry.faketsdb.service;
-
-import lc.mecha.fabric.LiteralMessageSubscription;
-import lc.mecha.http.server.WebPipeline;
-import lc.mecha.http.server.WebServer;
-import lc.mecha.log.MechaLogger;
-import lc.mecha.log.MechaLoggerFactory;
-import lc.mecha.util.BasicallyDangerous;
-
-import java.util.HashSet;
-
-/**
- * This class implements a fake OpenTSDB service that receives data from OpenTSDB clients such as
- * tcollector. Instead of storing the data, it reformats each datapoint and relays it into ESP.
- *
- * @author Alex Leigh
- * @since mk1 (HORIZON BRAVE)
- */
-public class FakeTSDBService extends BasicallyDangerous {
- private static final MechaLogger logger = MechaLoggerFactory.getLogger(FakeTSDBService.class);
-
- public static void main(String[] args) throws Exception {
- FakeTSDBService svc = new FakeTSDBService();
- svc.runDangerously();
- }
-
- @Override
- public void runDangerously() throws Exception {
- // 4242 is the default OpenTSDB port
- WebServer srv = new WebServer(4242, 128);
- HashSet<String> prefixes = new HashSet<>();
- prefixes.add("/api");
-
- srv.getWebPipeline().getServicePipeline().getSubscriptionBase().subscribe(
- new LiteralMessageSubscription<>(new TSDBRelay(prefixes), WebPipeline.KEY_SERVICE));
-
- srv.start();
- }
-}
diff --git a/lc-telemetry-faketsdb-svc/src/main/java/lc/telemetry/faketsdb/service/TSDBRelay.java b/lc-telemetry-faketsdb-svc/src/main/java/lc/telemetry/faketsdb/service/TSDBRelay.java
deleted file mode 100644
index dc094d3780521626251b702f27c302ee2edc26e5..0000000000000000000000000000000000000000
--- a/lc-telemetry-faketsdb-svc/src/main/java/lc/telemetry/faketsdb/service/TSDBRelay.java
+++ /dev/null
@@ -1,86 +0,0 @@
-package lc.telemetry.faketsdb.service;
-
-import lc.esp.sdk.*;
-import lc.esp.sdk.telemetry.TelemetryFrame;
-import lc.esp.sdk.telemetry.TelemetrySymbol;
-import lc.mecha.fabric.HandlerStatus;
-import lc.mecha.http.server.PrefixedHandler;
-import lc.mecha.http.server.WebTransaction;
-import lc.mecha.json.JSONArray;
-import lc.mecha.json.JSONObject;
-import lc.mecha.log.MechaLogger;
-import lc.mecha.log.MechaLoggerFactory;
-import org.apache.commons.io.IOUtils;
-
-import java.io.Serializable;
-import java.nio.charset.StandardCharsets;
-import java.time.Instant;
-import java.util.Set;
-
-/**
- * /api/put handler to receive OpenTSDB messages and relay them into ESP.
- *
- * @author Alex Leigh
- * @since mk1 (HORIZON BRAVE)
- */
-public class TSDBRelay extends PrefixedHandler {
- private static final MechaLogger logger = MechaLoggerFactory.getLogger(TSDBRelay.class);
- public static final String KEY_TIMESTAMP = "timestamp";
- public static final String KEY_METRIC = "metric";
- public static final String KEY_TAGS = "tags";
- public static final String KEY_VALUE = "value";
- private final ESPProducer producer;
-
- public TSDBRelay(Set<String> pathPrefixes) throws Exception {
- super(pathPrefixes);
- ESPClient esp = new ESPClient();
- esp.start();
- ESPAddress telemDest = new ESPAddress("lc", "global",
- "env", "mon", ESPAddressClass.TOPIC,
- ESPMessageClass.TELEMETRY);
- ESPSession session = esp.createSession();
- producer = session.createProducer(telemDest);
- }
-
- @Override
- public HandlerStatus handlePrefixedWebRequest(WebTransaction request) throws Throwable {
- try {
- String msgStr = IOUtils.toString(request.httpServletRequest.getInputStream(), StandardCharsets.UTF_8);
- JSONArray tsbMetrics = new JSONArray(msgStr);
- // System.out.println("Received opentsdb request: " + tsbMetrics);
-
- // For each metric we build a frame with a single symbol, because this code does not
- // group metrics that share the same tag set into one frame
- for (int i = 0; i < tsbMetrics.length(); i++) {
- JSONObject tsbMetric = tsbMetrics.getJSONObject(i);
- TelemetryFrame frame = new TelemetryFrame(Instant.ofEpochSecond(tsbMetric.getLong(KEY_TIMESTAMP)));
- JSONObject tsdbTags = tsbMetric.getJSONObject(KEY_TAGS);
- for (String tagName : tsdbTags.keySet()) {
- String tagValue = tsdbTags.getString(tagName);
- frame.getTags().put(tagName, tagValue);
- }
- frame.getSymbols().add(new TelemetrySymbol(tsbMetric.getString(KEY_METRIC),
- (Serializable) tsbMetric.get(KEY_VALUE),
- null, false, true));
-
- logger.info("Built frame: {}", frame);
-
- // FIXME: Not threadsafe?
- ESPMessage telemMsg = frame.toMessage();
- producer.send(telemMsg);
- }
-
- JSONObject reply = new JSONObject();
- reply.put("success", tsbMetrics.length());
- reply.put("failed", 0);
- reply.put("errors", new JSONArray());
-
- request.httpServletResponse.getOutputStream().write(reply.toString().getBytes(StandardCharsets.UTF_8));
-
- return HandlerStatus.BREAK;
- } catch (Exception e) {
- e.printStackTrace();
- throw e;
- }
- }
-}
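
For reference, a hypothetical client sketch (not part of this patch) showing the kind of OpenTSDB-style `/api/put` request the handler above accepts. The metric name, tags, and value are made up; the field names and the success/failed/errors reply shape mirror the `KEY_*` constants and response built in `TSDBRelay`, and port 4242 matches the `WebServer` started by `FakeTSDBService`.

```java
// Hypothetical client sketch: POSTs one OpenTSDB-style datapoint to the fake
// TSDB service. Field names match the KEY_* constants read by TSDBRelay; the
// metric, tags, and value are made up.
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class PutExample {
    public static void main(String[] args) throws Exception {
        String body = """
                [
                  {
                    "metric": "sys.cpu.user",
                    "timestamp": 1356998400,
                    "value": 42.5,
                    "tags": { "host": "web01" }
                  }
                ]
                """;
        HttpRequest request = HttpRequest.newBuilder(URI.create("http://localhost:4242/api/put"))
                .header("Content-Type", "application/json")
                .POST(HttpRequest.BodyPublishers.ofString(body))
                .build();
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        // TSDBRelay replies with {"success":<count>,"failed":0,"errors":[]}
        System.out.println(response.body());
    }
}
```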
diff --git a/lc-telemetry-historian-svc/.gitignore b/lc-telemetry-historian-svc/.gitignore
deleted file mode 100644
index c9c56767351f2752f2fad6e75af920655d3efa8d..0000000000000000000000000000000000000000
--- a/lc-telemetry-historian-svc/.gitignore
+++ /dev/null
@@ -1,2 +0,0 @@
-Dockerfile
-
diff --git a/lc-telemetry-historian-svc/build.gradle b/lc-telemetry-historian-svc/build.gradle
deleted file mode 100644
index bb69c47471da95c3627dd42e8a3cc563912d05d1..0000000000000000000000000000000000000000
--- a/lc-telemetry-historian-svc/build.gradle
+++ /dev/null
@@ -1,51 +0,0 @@
-plugins {
- id 'java'
- id 'application'
- id "com.palantir.docker" version "0.33.0"
-}
-
-group 'leighco'
-version '1.9'
-
-sourceCompatibility = JavaVersion.VERSION_17
-targetCompatibility = JavaVersion.VERSION_17
-
-repositories {
- mavenCentral()
-}
-
-dependencies {
- implementation project(':lc-esp-sdk')
- implementation project(':lc-mecha-http-client')
- testImplementation 'org.junit.jupiter:junit-jupiter-api:5.7.0'
- testRuntimeOnly 'org.junit.jupiter:junit-jupiter-engine:5.7.0'
-}
-
-test {
- useJUnitPlatform()
-}
-
-
-application {
- mainClass = 'lc.telemetry.historian.service.HistorianService'
-}
-
-
-docker {
- name "docker.leigh-co.com/${project.name}:${project.version}"
- files tasks.distTar.outputs
-}
-
-task dockerfile() {
- doLast {
- new File(projectDir, 'Dockerfile').text = """FROM docker.leigh-co.com/openjdk:17-alpine
-EXPOSE 52018
-RUN mkdir /app
-COPY ${project.name}-${project.version}.tar /dist.tar
-RUN tar xfv /dist.tar
-RUN rm /dist.tar
-ENTRYPOINT ["/${project.name}-${project.version}/bin/${project.name}"]
-"""
- }
-}
-
diff --git a/lc-telemetry-historian-svc/src/main/java/lc/telemetry/historian/service/HistorianService.java b/lc-telemetry-historian-svc/src/main/java/lc/telemetry/historian/service/HistorianService.java
deleted file mode 100644
index c7d066527a7c89bf2f386654223845ed4eded2b7..0000000000000000000000000000000000000000
--- a/lc-telemetry-historian-svc/src/main/java/lc/telemetry/historian/service/HistorianService.java
+++ /dev/null
@@ -1,167 +0,0 @@
-package lc.telemetry.historian.service;
-
-import lc.esp.sdk.*;
-import lc.esp.sdk.telemetry.TelemetryFrame;
-import lc.esp.sdk.telemetry.TelemetrySymbol;
-import lc.mecha.json.JSONArray;
-import lc.mecha.json.JSONObject;
-import lc.mecha.log.MechaLogger;
-import lc.mecha.log.MechaLoggerFactory;
-import lc.mecha.util.BasicallyDangerous;
-import lc.mecha.util.StringAccumulatorV2;
-import lc.mecha.util.UniversalJob;
-import lc.mecha.util.VelocityWatch;
-import lc.zero.sdk.ZeroClient;
-import lc.zero.sdk.ZeroServiceConfig;
-import org.apache.http.HttpResponse;
-import org.apache.http.client.methods.HttpPost;
-import org.apache.http.entity.BasicHttpEntity;
-import org.apache.http.impl.client.CloseableHttpClient;
-import org.apache.http.impl.client.HttpClients;
-import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
-import org.apache.http.util.EntityUtils;
-
-import java.io.ByteArrayInputStream;
-import java.io.IOException;
-import java.io.Serializable;
-import java.nio.charset.StandardCharsets;
-import java.util.Map;
-import java.util.concurrent.Executors;
-import java.util.concurrent.ThreadPoolExecutor;
-
-/**
- * The historian service receives telemetry messages from the ESP
- * message fabric and relays those messages to an OpenTSDB server.
- *
- * @author Alex Leigh
- * @since mk18 (GIPSY DANGER)
- */
-public class HistorianService extends BasicallyDangerous {
- public static final String ZERO_SERVICE = "lc.historian";
- public static final String TSDB_KEY_METRIC = "metric";
- public static final String TSDB_KEY_TIMESTAMP = "timestamp";
- public static final String TSDB_KEY_VALUE = "value";
- public static final String TSDB_KEY_TAGS = "tags";
- private static final MechaLogger logger = MechaLoggerFactory.getLogger(HistorianService.class);
- private final CloseableHttpClient httpClient;
- private final ThreadPoolExecutor exec = (ThreadPoolExecutor) Executors.newCachedThreadPool();
- private final VelocityWatch vw = new VelocityWatch(logger);
- private String opentsdbUrl;
-
- public static void main(String[] args) {
- UniversalJob.banner(logger, "Historian mk1 (HORIZON BRAVE)");
- new HistorianService().run();
- }
-
- public HistorianService() {
- PoolingHttpClientConnectionManager connectionManager = new PoolingHttpClientConnectionManager();
- connectionManager.setMaxTotal(100);
- connectionManager.setDefaultMaxPerRoute(100);
- this.httpClient = HttpClients.custom().setConnectionManager(connectionManager).build();
- }
-
- @Override
- public void runDangerously() throws Exception {
- try (ESPClient esp = new ESPClient()) {
- esp.start();
-
- ZeroClient zero = esp.getZero();
- ZeroServiceConfig cfg = zero.getZai().readConfig(ZERO_SERVICE);
- if (cfg == null) {
- logger.error("No ZERO service configuration provided.");
- System.exit(UniversalJob.RET_BADENV);
- }
- logger.info("Found service configuration: {}", cfg);
-
- String addrUrl = cfg.getCfg().getString("address");
- ESPAddress telemDest = new ESPAddress(addrUrl);
- logger.info("Found address: {}", telemDest);
-
- opentsdbUrl = cfg.getCfg().getString("opentsdb");
-
- try (ESPSession session = esp.createSession()) {
- try (ESPConsumer consumer = session.createConsumer(telemDest)) {
- //noinspection InfiniteLoopStatement
- while (true) {
- ESPMessage msg = consumer.receive();
- publish(msg);
- }
- }
- }
- }
- }
-
- /**
- * Publish the given telemetry message to OpenTSDB.
- */
- private void publish(ESPMessage msg) {
- exec.execute(new BasicallyDangerous() {
- @Override
- public void runDangerously() throws IOException {
- HttpPost httpPost = new HttpPost(opentsdbUrl);
- TelemetryFrame tf = new TelemetryFrame(msg);
-
- vw.event("frame");
-
- logger.trace("Built frame: {}", tf);
-
- // opentsdb only supports seconds precision
- long seconds = tf.getTime().getEpochSecond();
-
- JSONArray arr = new JSONArray();
-
- for (TelemetrySymbol symbol : tf.getSymbols()) {
- JSONObject symJson = new JSONObject();
-
- StringAccumulatorV2 sa = new StringAccumulatorV2(".");
- sa.push(symbol.getName());
- // TODO: We need to enforce formatting here
- String uom = symbol.getUom();
- if (uom != null) {
- symJson.put(TSDB_KEY_METRIC, sa.asString() + "-" + uom);
- } else {
- symJson.put(TSDB_KEY_METRIC, sa.asString());
- }
-
- symJson.put(TSDB_KEY_TIMESTAMP, seconds);
- symJson.put(TSDB_KEY_VALUE, symbol.getValue());
- JSONObject tagsJson = new JSONObject();
- symJson.put(TSDB_KEY_TAGS, tagsJson);
-
- for (Map.Entry tag : tf.getTags().entrySet()) {
- tagsJson.put(tag.getKey(), tag.getValue());
- }
-
- arr.put(symJson);
- }
-
- // OpenTSDB does not support chunked requests!
- BasicHttpEntity requestEntity = new BasicHttpEntity();
- byte[] r = arr.toString().getBytes(StandardCharsets.UTF_8);
- logger.trace("Built TSDB message: {}", arr.toString());
-
- requestEntity.setContent(new ByteArrayInputStream(r));
- requestEntity.setContentLength(r.length);
- requestEntity.setChunked(false);
-
- httpPost.setEntity(requestEntity);
- HttpResponse httpResponse = httpClient.execute(httpPost);
-
- // TSDB is very terse, it responds 204 on success
- /*
- if (httpResponse.getStatusLine().getStatusCode() != 204) {
- System.out.println(httpResponse.getStatusLine());
- Scanner sc = new Scanner(httpResponse.getEntity().getContent());
- while (sc.hasNext()) {
- System.out.println(sc.nextLine());
- }
- }
-
- */
-
- EntityUtils.consumeQuietly(httpResponse.getEntity());
- }
- });
- logger.trace("Received message: {}", msg);
- }
-}
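
A hypothetical sketch (not part of this patch) of the ZERO service configuration shape that `HistorianService` reads under the `lc.historian` service name: an `address` entry naming the ESP telemetry destination and an `opentsdb` entry with the endpoint to POST to. Both values below are placeholders.

```java
// Hypothetical sketch only: illustrates the two configuration keys that
// HistorianService.runDangerously() reads from its ZERO service config.
import lc.mecha.json.JSONObject;

public class HistorianConfigExample {
    public static void main(String[] args) {
        JSONObject cfg = new JSONObject();
        // ESP destination the consumer subscribes to (placeholder value).
        cfg.put("address", "<ESP address URL of the telemetry topic>");
        // OpenTSDB endpoint the relayed frames are POSTed to (placeholder value).
        cfg.put("opentsdb", "http://opentsdb.example.com:4242/api/put");
        System.out.println(cfg);
    }
}
```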
diff --git a/settings.gradle b/settings.gradle
index 81242c7613df138f19718daf5a6d1285e4040c49..28a1a8d36420826ed9efee76f720bd5014b853e8 100644
--- a/settings.gradle
+++ b/settings.gradle
@@ -1,3 +1,4 @@
+include 'lc-amf'
include 'lc-astro'
include 'lc-astro-indi'
include 'lc-astro-indi-client'
@@ -9,18 +10,29 @@ include 'lc-chain-pki-app'
include 'lc-chain-pki-svc'
include 'lc-cloudbox'
include 'lc-conflux'
+include 'lc-data-flight'
+include 'lc-dns-svc'
include 'lc-eo'
include 'lc-eo-bson'
+include 'lc-eo-changestream'
include 'lc-eo-json'
include 'lc-eo-schema'
include 'lc-eo-yaml'
include 'lc-example-calcite'
include 'lc-example-graphql'
+include 'lc-example-xdr'
+include 'lc-gdn-api-svc'
include 'lc-gis'
include 'lc-gnusto'
+include 'lc-isf-tc'
+include 'lc-isles'
+include 'lc-isles-svc'
+include 'lc-junk'
+include 'lc-leighcraft'
include 'lc-lighting'
include 'lc-mecha'
include 'lc-mecha-audio'
+include 'lc-mecha-calc'
include 'lc-mecha-cisco'
include 'lc-mecha-cli'
include 'lc-mecha-db'
@@ -39,54 +51,16 @@ include 'lc-mecha-mq'
include 'lc-mecha-nodegraph'
include 'lc-mecha-olap'
include 'lc-mecha-pdf'
+include 'lc-mecha-quic'
include 'lc-mecha-smtp'
include 'lc-notek-app'
include 'lc-radio'
-include 'lc-service-sdk-example'
-include 'lc-dns-svc'
-include 'lc-zero-sdk'
-include 'lc-zero-beacon-svc'
include 'lc-tesseract'
include 'lc-video'
include 'lc-video-edltool'
-rootProject.name = 'massive'
-include 'lc-esp-sdk'
-include 'lc-esp-engine-svc'
-include 'lc-esp-service-clock'
-include 'lc-minecraft-link'
-include 'lc-minecraft-link-quests'
-include 'lc-esp-monitor-app'
-include 'lc-junk'
-include 'lc-hello-app'
-include 'lc-telemetry-historian-svc'
-include 'lc-telemetry-faketsdb-svc'
-include 'lc-gdn-ipbeacon-svc'
-include 'lc-ircbot-svc'
-include 'lc-evelyn-svc'
-include 'lc-mecha-calc'
-include 'lc-esp-cli-sdk'
-include 'lc-evelyn-svc'
-include ':qody:qody-vision-svc'
-include 'lc-ionos'
-include 'lc-data-flight'
-include 'qody:qody-vision-schema'
-findProject(':qody:qody-vision-schema')?.name = 'qody-vision-schema'
-include 'lc-eo-changestream'
-include 'lc-elements-sdk'
-include 'lc-elements-esp'
-include 'lc-minecraft-terrain'
-include 'lc-minecraft-link-sdk'
-include 'lc-minecraft-leighco-svc'
-include 'lc-esp-test-svc'
-include 'lc-esp-test-sdk'
-include 'lc-isles'
-include 'lc-isles-svc'
include 'lc-violet'
include 'lc-violet-svc'
-include 'lc-amf'
-include 'lc-leighcraft'
-include 'lc-mecha-quic'
-include 'lc-example-xdr'
+include 'lc-zero-beacon-svc'
+include 'lc-zero-sdk'
include 'mabe-lanterna'
-include 'lc-isf-tc'
-include 'lc-gdn-api-svc'
\ No newline at end of file
+rootProject.name = 'massive'
\ No newline at end of file