package io.ebean;

import io.avaje.lang.NonNullApi;
import io.avaje.lang.Nullable;
import io.ebean.docstore.DocQueryContext;
import io.ebean.docstore.RawDoc;

import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.function.Consumer;
import java.util.function.Predicate;

/**
 * Document storage operations.
 */
@NonNullApi
public interface DocumentStore {

  /**
   * Update the associated document store using the result of the query.
   * <p>
   * This will execute the query against the database creating a document for each
   * bean graph and sending this to the document store.
   * </p>
   * <p>
   * Note that the select and fetch paths of the query are set for you to match the
   * document structure needed based on <code>@DocStore</code> and <code>@DocStoreEmbedded</code>,
   * so this query only needs to supply the predicates.
   * </p>
   * <p>
   * This query will be executed using findEach so it is safe to use a query
   * that will fetch a lot of beans. The default bulkBatchSize is used.
   * </p>
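   * <p>
   * A sketch of typical use (the <code>active</code> predicate is illustrative only):
   * </p>
   * <pre>{@code
   *
   *   Query<Product> query = database.find(Product.class)
   *     .where().eq("active", true)
   *     .query();
   *
   *   documentStore.indexByQuery(query);
   *
   * }</pre>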
   *
   * @param query The query that selects objects to send to the document store.
   */
  <T> void indexByQuery(Query<T> query);

  /**
   * Update the associated document store index using the result of the query, additionally
   * specifying a bulkBatchSize to use when sending the documents to ElasticSearch.
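   * <p>
   * A sketch (the <code>active</code> predicate is illustrative only):
   * </p>
   * <pre>{@code
   *
   *   Query<Product> query = database.find(Product.class)
   *     .where().eq("active", true)
   *     .query();
   *
   *   // send to the document store in batches of 1000
   *   documentStore.indexByQuery(query, 1000);
   *
   * }</pre>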
   *
   * @param query         The query that selects objects to send to the document store.
   * @param bulkBatchSize The batch size to use when bulk sending to the document store.
   */
  <T> void indexByQuery(Query<T> query, int bulkBatchSize);

  /**
   * Update the document store for all beans of this type.
   * <p>
   * This is the same as indexByQuery where the query has no predicates and so fetches all rows.
   * </p>
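   * <pre>{@code
   *
   *   // for example, rebuild the index for all Product beans
   *   database.docStore().indexAll(Product.class);
   *
   * }</pre>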
   */
  void indexAll(Class<?> beanType);

  /**
   * Return the bean by fetching its content from the document store.
   * If the document is not found null is returned.
   * <p>
   * Typically this is called indirectly by findOne() on the query.
   * </p>
   * <pre>{@code
   *
   * Customer customer =
   *   database.find(Customer.class)
   *     .setUseDocStore(true)
   *     .setId(42)
   *     .findOne();
   *
   * }</pre>
   */
  @Nullable
  <T> T find(DocQueryContext<T> request);

  /**
   * Execute the find list query. This request is prepared to execute secondary queries.
   * <p>
   * Typically this is called indirectly by findList() on the query that has setUseDocStore(true).
   * </p>
   * <pre>{@code
   *
   * List<Customer> newCustomers =
   *  database.find(Customer.class)
   *    .setUseDocStore(true)
   *    .where().eq("status", Customer.Status.NEW)
   *    .findList();
   *
   * }</pre>
   */
  <T> List<T> findList(DocQueryContext<T> request);

  /**
   * Execute the query against the document store returning the paged list.
   * <p>
   * The query should have <code>firstRow</code> or <code>maxRows</code> set prior to calling this method.
   * </p>
   * <p>
   * Typically this is called indirectly by findPagedList() on the query that has setUseDocStore(true).
   * </p>
   * <pre>{@code
   *
   * PagedList<Customer> newCustomers =
   *  database.find(Customer.class)
   *    .setUseDocStore(true)
   *    .where().eq("status", Customer.Status.NEW)
   *    .setMaxRows(50)
   *    .findPagedList();
   *
   * }</pre>
   */
  <T> PagedList<T> findPagedList(DocQueryContext<T> request);

  /**
   * Execute the query against the document store with the expectation of a large set of results
   * that are processed in a scrolling resultSet fashion.
   * <p>
   * For example, with the ElasticSearch doc store this uses SCROLL.
   * </p>
   * <p>
   * Typically this is called indirectly by findEach() on the query that has setUseDocStore(true).
   * </p>
   * <pre>{@code
   *
   *  database.find(Order.class)
   *    .setUseDocStore(true)
   *    .where()... // perhaps add predicates
   *    .findEach((Order order) -> {
   *      // process the bean ...
   *    });
   *
   * }</pre>
   */
  <T> void findEach(DocQueryContext<T> query, Consumer<T> consumer);

  /**
   * Execute the query against the document store with the expectation of a large set of results
   * that are processed in a scrolling resultSet fashion.
   * <p>
   * Unlike findEach() this provides the opportunity to stop iterating through the large query.
   * </p>
   * <p>
   * For example, with the ElasticSearch doc store this uses SCROLL.
   * </p>
   * <p>
   * Typically this is called indirectly by findEachWhile() on the query that has setUseDocStore(true).
   * </p>
   * <pre>{@code
   *
   *  database.find(Order.class)
   *    .setUseDocStore(true)
   *    .where()... // perhaps add predicates
   *    .findEachWhile((Order order) -> {
   *      // process the bean ...
   *
   *      // return true to continue, false to stop
   *      // boolean shouldContinue = ...
   *      return shouldContinue;
   *    });
   *
   * }</pre>
   */
  <T> void findEachWhile(DocQueryContext<T> query, Predicate<T> consumer);

  /**
   * Find each processing raw documents.
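   * <p>
   * A sketch (the index name and raw JSON query are illustrative only):
   * </p>
   * <pre>{@code
   *
   *   String rawQuery = "{\"query\":{\"match_all\":{}}}";
   *
   *   documentStore.findEach("product", rawQuery, doc -> {
   *     // process each RawDoc ...
   *   });
   *
   * }</pre>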
   *
   * @param indexNameType The full index name and type
   * @param rawQuery      The query to execute
   * @param consumer      Consumer to process each document
   */
  void findEach(String indexNameType, String rawQuery, Consumer<RawDoc> consumer);

  /**
   * Find each processing raw documents, stopping when the predicate returns false.
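   * <p>
   * A sketch (the index name, raw JSON query and the <code>shouldContinue</code>
   * helper are illustrative only):
   * </p>
   * <pre>{@code
   *
   *   String rawQuery = "{\"query\":{\"match_all\":{}}}";
   *
   *   documentStore.findEachWhile("product", rawQuery, doc -> {
   *     // process the raw document ...
   *     // return true to continue, false to stop
   *     return shouldContinue(doc);
   *   });
   *
   * }</pre>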
   *
   * @param indexNameType The full index name and type
   * @param rawQuery      The query to execute
   * @param consumer      Consumer to process each document until false is returned
   */
  void findEachWhile(String indexNameType, String rawQuery, Predicate<RawDoc> consumer);

  /**
   * Process the queue entries, sending updates to the document store or queuing them for later processing.
   */
  long process(List<DocStoreQueueEntry> queueEntries) throws IOException;

  /**
   * Drop the index from the document store (similar to DDL drop table).
   * <pre>{@code
   *
   *   DocumentStore documentStore = database.docStore();
   *
   *   documentStore.dropIndex("product_copy");
   *
   * }</pre>
   */
  void dropIndex(String indexName);

  /**
   * Create an index given a mapping file as a resource in the classpath (similar to DDL create table).
   * <pre>{@code
   *
   *   DocumentStore documentStore = database.docStore();
   *
   *   // uses the product_copy.mapping.json resource
   *   // ... to define mappings for the index
   *
   *   documentStore.createIndex("product_copy", null);
   *
   * }</pre>
   *
   * @param indexName the name of the new index
   * @param alias     the alias of the index
   */
  void createIndex(String indexName, String alias);

  /**
   * Modify the settings on an index.
   * <p>
   * For example, this can be used to set the ElasticSearch refresh_interval
   * on an index before a bulk update.
   * </p>
   * <pre>{@code
   *
   *   // refresh_interval -1 ... disable refresh while bulk loading
   *
   *   Map<String,Object> settings = new LinkedHashMap<>();
   *   settings.put("refresh_interval", "-1");
   *
   *   documentStore.indexSettings("product", settings);
   *
   * }</pre>
   * <pre>{@code
   *
   *   // refresh_interval 1s ... restore after bulk loading
   *
   *   Map<String,Object> settings = new LinkedHashMap<>();
   *   settings.put("refresh_interval", "1s");
   *
   *   documentStore.indexSettings("product", settings);
   *
   * }</pre>
   *
   * @param indexName the name of the index to update settings on
   * @param settings  the settings to set on the index
   */
  void indexSettings(String indexName, Map<String, Object> settings);

  /**
   * Copy the index to a new index.
   * <p>
   * This copy process does not use the database but instead will copy from the source index to a destination index.
   * </p>
   * <pre>{@code
   *
   *  long copyCount = documentStore.copyIndex(Product.class, "product_copy");
   *
   * }</pre>
   *
   * @param beanType The bean type of the source index
   * @param newIndex The name of the index to copy to
   * @return the number of documents copied to the new index
   */
  long copyIndex(Class<?> beanType, String newIndex);

  /**
   * Copy entries from an index to a new index but limiting to documents that have been
   * modified since the sinceEpochMillis time.
   * <p>
   * To support this the document needs to have a <code>@WhenModified</code> property.
   * </p>
   * <pre>{@code
   *
   *  long copyCount = documentStore.copyIndex(Product.class, "product_copy", sinceMillis);
   *
   * }</pre>
   *
   * @param beanType         The bean type of the source index
   * @param newIndex         The name of the index to copy to
   * @param sinceEpochMillis Only copy documents modified since this epoch millis time
   * @return the number of documents copied to the new index
   */
  long copyIndex(Class<?> beanType, String newIndex, long sinceEpochMillis);

  /**
   * Copy from a source index to a new index taking only the documents
   * matching the given query.
   * <pre>{@code
   *
   *  // predicates to select the source documents to copy
   *  Query<Product> query = database.find(Product.class)
   *    .where()
   *      .ge("whenModified", new Timestamp(since))
   *      .ge("name", "A")
   *      .lt("name", "D")
   *      .query();
   *
   *  // copy from the source index to "product_copy" index
   *  long copyCount = documentStore.copyIndex(query, "product_copy", 1000);
   *
   * }</pre>
   *
   * @param query         The query to select the source documents to copy
   * @param newIndex      The target index to copy the documents to
   * @param bulkBatchSize The ElasticSearch bulk batch size, if 0 uses the default.
   * @return The number of documents copied to the new index.
   */
  long copyIndex(Query<?> query, String newIndex, int bulkBatchSize);
}