Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -78,9 +78,12 @@
import org.apache.accumulo.core.util.Pair;
import org.apache.accumulo.core.util.threads.Threads;
import org.apache.accumulo.monitor.next.InformationFetcher;
import org.apache.accumulo.monitor.rest.bulkImports.BulkImport;
import org.apache.accumulo.monitor.rest.bulkImports.BulkImportInformation;
import org.apache.accumulo.server.AbstractServer;
import org.apache.accumulo.server.ServerContext;
import org.apache.accumulo.server.util.TableInfoUtil;
import org.apache.accumulo.server.util.bulkCommand.ListBulk;
import org.apache.thrift.transport.TTransportException;
import org.apache.zookeeper.KeeperException;
import org.eclipse.jetty.ee10.servlet.ResourceServlet;
Expand Down Expand Up @@ -566,6 +569,9 @@ public static class CompactionStats {
Suppliers.memoizeWithExpiration(this::computeExternalCompactionsSnapshot,
expirationTimeMinutes, MINUTES);

private final Supplier<BulkImport> bulkImportSupplier =
Suppliers.memoizeWithExpiration(this::computeBulkImports, expirationTimeMinutes, MINUTES);

/**
* @return active tablet server scans. Values are cached and refresh after
* {@link #expirationTimeMinutes}.
Expand Down Expand Up @@ -610,6 +616,20 @@ public long getCompactorInfoFetchedTimeMillis() {
return compactorInfoSupplier.get().fetchedTimeMillis;
}

// Builds a fresh snapshot of in-progress bulk imports by consulting the
// bulk-import listing utility; consumed via the memoizing supplier so the
// (potentially expensive) listing is refreshed at most once per expiration window.
private BulkImport computeBulkImports() {
  final BulkImport snapshot = new BulkImport();
  ListBulk.list(getContext(),
      status -> snapshot.addBulkImport(new BulkImportInformation(status.sourceDir(),
          status.lastUpdate().toEpochMilli(), status.state(), status.tableId(),
          status.fateId())));
  return snapshot;
}

/**
 * @return the current set of bulk imports. Values are cached and refresh after
 *         {@link #expirationTimeMinutes}.
 */
public BulkImport getBulkImports() {
  return bulkImportSupplier.get();
}

/**
* @return running external compactions keyed by ECID.
*/
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,8 @@
*/
package org.apache.accumulo.monitor.rest.bulkImports;

import org.apache.accumulo.core.data.TableId;
import org.apache.accumulo.core.fate.FateId;
import org.apache.accumulo.server.util.bulkCommand.ListBulk;

/**
Expand All @@ -28,11 +30,11 @@
public class BulkImportInformation {

// Variable names become JSON key
public String filename;
public long age;
public ListBulk.BulkState state;

public BulkImportInformation() {}
public final String filename;
public final long age;
public final ListBulk.BulkState state;
public final String tableId;
public final String fateId;

/**
* Creates new bulk import object
Expand All @@ -41,9 +43,12 @@ public BulkImportInformation() {}
* @param age age of the bulk import
* @param state state of the bulk import
*/
public BulkImportInformation(String filename, long age, ListBulk.BulkState state) {
public BulkImportInformation(String filename, long age, ListBulk.BulkState state, TableId tableId,
FateId fateId) {
this.filename = filename;
this.age = age;
this.state = state;
this.tableId = tableId.canonical();
this.fateId = fateId.canonical();
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,6 @@
import jakarta.ws.rs.core.MediaType;

import org.apache.accumulo.monitor.Monitor;
import org.apache.accumulo.server.util.bulkCommand.ListBulk;

/**
* The BulkImportResource is responsible for obtaining the information of the bulk import, and
Expand All @@ -47,12 +46,6 @@ public class BulkImportResource {
*/
@GET
public BulkImport getTables() {
BulkImport bulkImport = new BulkImport();
ListBulk.list(monitor.getContext(), bulkStatus -> {
bulkImport.addBulkImport(new BulkImportInformation(bulkStatus.sourceDir(),
bulkStatus.lastUpdate().toEpochMilli(), bulkStatus.state()));
});

return bulkImport;
return monitor.getBulkImports();
}
}

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -51,12 +51,20 @@ $(function () {
"stateSave": true,
"autoWidth": false,
"columns": [{
"data": "tableId",
"width": "5%"
},
{
"data": "fateId",
"width": "25%"
},
{
"data": "filename",
"width": "40%"
"width": "35%"
},
{
"data": "age",
"width": "45%",
"width": "25%",
"render": function (data, type) {
var age = Number(data);
if (type === 'display') {
Expand All @@ -67,40 +75,7 @@ $(function () {
},
{
"data": "state",
"width": "15%"
}
]
});

// Generates the bulkPerServerTable DataTable
bulkPerServerTable = $('#bulkPerServerTable').DataTable({
"ajax": {
"url": url,
"dataSrc": "tabletServerBulkImport"
},
"stateSave": true,
"columns": [{
"data": "server",
"type": "html",
"render": function (data, type) {
if (type === 'display') {
data = `<a href="tservers?s=${data}">${data}</a>`;
}
return data;
}
},
{
"data": "importSize"
},
{
"data": "oldestAge",
"render": function (data, type) {
var age = Number(data);
if (type === 'display') {
return age > 0 ? new Date(age) : "-";
}
return age > 0 ? age : 0;
}
"width": "10%"
}
]
});
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,8 @@
<caption><span class="table-caption">Bulk Imports</span><br /></caption>
<thead>
<tr>
<th>Table ID&nbsp;</th>
<th>Fate ID&nbsp;</th>
<th>Directory&nbsp;</th>
<th title="The age of the import.">Age&nbsp;</th>
<th title="The current state of the bulk import">State&nbsp;</th>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -215,6 +215,8 @@ public void testFate() throws Exception {
log.debug("Deleted lock of primary manager");
waitToSeeManagers(ctx, 2, store, true);

getCluster().getProcesses();

stop.set(true);
// Wait for the background operations to complete and ensure that none had errors. Managers
// stoppping/starting should not cause any problems for Accumulo API operations.
Expand Down