<?xml version="1.0" encoding="UTF-8"?><beans:beansxmlns="http://www.springframework.org/schema/security"xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"xmlns:beans="http://www.springframework.org/schema/beans"xsi:schemaLocation=" http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd http://www.springframework.org/schema/security https://www.springframework.org/schema/security/spring-security.xsd"><!-- Resources not processed by spring security filters --><httppattern="/images/**"security="none"/><httppattern="/scripts/**"security="none"/><httppattern="/styles/**"security="none"/><http><intercept-urlpattern="/error"access="permitAll"/><intercept-urlpattern="/login*/**"access="permitAll"/><intercept-urlpattern="/signup*"access="permitAll"/><intercept-urlpattern="/admin/**"access="hasRole('ROLE_ADMIN')"/><intercept-urlpattern="/**"access="isAuthenticated()"/><form-loginlogin-page="/login"authentication-success-handler-ref="authenticationSuccessHandler"authentication-failure-handler-ref="authenticationFailureHandler"/><remember-meuser-service-ref="userDetails"key="aaa"/><logoutlogout-url="/logout"logout-success-url="/login"invalidate-session="true"delete-cookies="aaa"/></http><authentication-manageralias="authenticationManager"><authentication-provideruser-service-ref="userDetails"><password-encoderref="passwordEncoder"/></authentication-provider></authentication-manager><beans:beanid="authenticationSuccessHandler"class="common.webapp.filter.ExtendedAuthenticationSuccessHandler"><beans:constructor-argvalue="/top"/></beans:bean><beans:beanid="authenticationFailureHandler"class="common.webapp.filter.ExtendedAuthenticationFailureHandler"><beans:propertyname="exceptionMappings"><beans:props><beans:propkey="org.springframework.security.authentication.DisabledException">/login/accountDisabled</beans:prop><beans:propkey="org.springframework.security.authentication.LockedException">/login/accountLocked</beans:prop><beans:propkey="org.springframework.security.authentication.AccountExpiredException">/login/accountExpired</beans:prop><beans:propkey="org.springframework.security.authentication.CredentialsExpiredException">/login/credentialsExpired</beans:prop><beans:propkey="org.springframework.security.authentication.BadCredentialsException">/login/badCredentials</beans:prop></beans:props></beans:property></beans:bean><beans:beanid="webexpressionHandler"class="org.springframework.security.web.access.expression.DefaultWebSecurityExpressionHandler"/></beans:beans>
from yt_dlp import YoutubeDL


def get_best_live_url(youtube_url: str) -> str:
    ydl_opts = {
        'quiet': True,
        'no_warnings': True,
        'skip_download': True,
        # Format selectors, in order of how easily they play back as a live stream
        'format': 'best[protocol^=m3u8]/best[ext=mp4]/best',
    }
    with YoutubeDL(ydl_opts) as ydl:
        info = ydl.extract_info(youtube_url, download=False)
    # 'url' contains a URL that can be played directly (m3u8 for live streams)
    stream_url = info.get('url')
    if not stream_url:
        # Fall back to scanning the format list for an m3u8 stream
        for f in info.get('formats', []):
            if 'm3u8' in (f.get('protocol') or ''):
                stream_url = f.get('url')
                break
    if not stream_url:
        raise RuntimeError('Unable to get live playback URL.')
    return stream_url
Function to open a YouTube video or live stream:
import cv2


def open_live_capture(stream_url: str) -> cv2.VideoCapture:
    cap = cv2.VideoCapture(stream_url)
    # Reduce latency (only takes effect on builds that support this property)
    try:
        cap.set(cv2.CAP_PROP_BUFFERSIZE, 1)
    except Exception:
        pass
    return cap
youtube_url = 'https://www.youtube.com/watch?v=<ID of the video you want to open>'
stream_url = get_best_live_url(youtube_url)
print('stream:', stream_url)

cap = open_live_capture(stream_url)
if not cap.isOpened():
    raise RuntimeError('Failed to open VideoCapture. Please use an FFmpeg-enabled build of OpenCV.')
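The snippet above only opens the capture. A minimal playback loop built on it might look like the following (a sketch, assuming the two helper functions above; press q to stop):

while True:
    ret, frame = cap.read()
    if not ret or frame is None:
        print('Stream ended or a frame could not be read.')
        break
    cv2.imshow('YouTube live', frame)
    # Press 'q' to stop playback
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break

cap.release()
cv2.destroyAllWindows()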
/**
 * Simple static factory method to add some syntactic sugar around a {@link Specification}.
 *
 * @apiNote with 4.0, this method will no longer accept {@literal null} specifications.
 * @param <T> the type of the {@link Root} the resulting {@literal Specification} operates on.
 * @param spec can be {@literal null}.
 * @return guaranteed to be not {@literal null}.
 * @since 2.0
 * @deprecated since 3.5, to be removed with 4.0 as we no longer want to support {@literal null} specifications.
 */
@Deprecated(since = "3.5.0", forRemoval = true)
static <T> Specification<T> where(@Nullable Specification<T> spec) {
    return spec == null ? (root, query, builder) -> null : spec;
}
The where method merely caters for the broken nullability allowance and is replaced by where(PredicateSpecification) in 4.0.
This may not be final yet, but on the main branch it has been changed as follows.
/**
 * Simple static factory method to add some syntactic sugar translating {@link PredicateSpecification} to
 * {@link Specification}.
 *
 * @param <T> the type of the {@link Root} the resulting {@literal Specification} operates on.
 * @param spec the {@link PredicateSpecification} to wrap.
 * @return guaranteed to be not {@literal null}.
 */
static <T> Specification<T> where(PredicateSpecification<T> spec) {

    Assert.notNull(spec, "PredicateSpecification must not be null");

    return (root, update, criteriaBuilder) -> spec.toPredicate(root, criteriaBuilder);
}
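As a usage sketch (the User entity and userRepository here are hypothetical stand-ins, the repository is assumed to extend JpaSpecificationExecutor<User>, and the exact API may still change before 4.0 ships):

// PredicateSpecification takes only (root, builder), with no query object.
PredicateSpecification<User> byName =
        (root, builder) -> builder.equal(root.get("name"), "alice");

// where(...) now simply adapts the PredicateSpecification to a Specification,
// which JpaSpecificationExecutor-based repositories can execute.
Specification<User> spec = Specification.where(byName);
List<User> users = userRepository.findAll(spec);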
AGPL-3.0 License: This OSI-approved open-source license is perfect for students, researchers, and enthusiasts. It encourages open collaboration and knowledge sharing. See the LICENSE file for full details.
import time

import cv2
import numpy as np
from ultralytics import YOLO

# Load the model into memory and get labelmap
model = YOLO('yolo11l.pt', task='detect')
labels = model.names

# Load image source
cap = cv2.VideoCapture(0)

# Set bounding box colors (using the Tableau 10 color scheme)
bbox_colors = [(164,120,87), (68,148,228), (93,97,209), (178,182,133), (88,159,106),
               (96,202,231), (159,124,168), (169,162,241), (98,118,150), (172,176,184)]

# Initialize control and status variables
avg_frame_rate = 0
frame_rate_buffer = []
fps_avg_len = 200

# Begin inference loop
while True:

    t_start = time.perf_counter()

    # Load frame from image source
    ret, frame = cap.read()
    if (frame is None) or (not ret):
        print('Unable to read frames from the camera. This indicates the camera is disconnected or not working. Exiting program.')
        break

    # Run inference on frame
    results = model(frame, verbose=False)

    # Extract results
    detections = results[0].boxes

    # Initialize variable for basic object counting example
    object_count = 0

    # Go through each detection and get bbox coords, confidence, and class
    for i in range(len(detections)):

        # Get bounding box coordinates
        # Ultralytics returns results in Tensor format, which have to be converted to a regular Python array
        xyxy_tensor = detections[i].xyxy.cpu()  # Detections in Tensor format in CPU memory
        xyxy = xyxy_tensor.numpy().squeeze()  # Convert tensors to Numpy array
        xmin, ymin, xmax, ymax = xyxy.astype(int)  # Extract individual coordinates and convert to int

        # Get bounding box class ID and name
        classidx = int(detections[i].cls.item())
        classname = labels[classidx]

        # Get bounding box confidence
        conf = detections[i].conf.item()

        # Draw box if confidence threshold is high enough
        if conf > 0.5:

            color = bbox_colors[classidx % 10]
            cv2.rectangle(frame, (xmin, ymin), (xmax, ymax), color, 2)

            label = f'{classname}: {int(conf*100)}%'
            labelSize, baseLine = cv2.getTextSize(label, cv2.FONT_HERSHEY_SIMPLEX, 0.5, 1)  # Get font size
            label_ymin = max(ymin, labelSize[1] + 10)  # Make sure not to draw label too close to top of window
            cv2.rectangle(frame, (xmin, label_ymin - labelSize[1] - 10), (xmin + labelSize[0], label_ymin + baseLine - 10), color, cv2.FILLED)  # Draw filled box to put label text in
            cv2.putText(frame, label, (xmin, label_ymin - 7), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 0), 1)  # Draw label text

            # Basic example: count the number of objects in the image
            object_count = object_count + 1

    # Calculate and draw framerate (if using video, USB, or Picamera source)
    cv2.putText(frame, f'FPS: {avg_frame_rate:0.2f}', (10, 20), cv2.FONT_HERSHEY_SIMPLEX, .7, (0, 255, 255), 2)  # Draw framerate

    # Display detection results
    cv2.putText(frame, f'Number of objects: {object_count}', (10, 40), cv2.FONT_HERSHEY_SIMPLEX, .7, (0, 255, 255), 2)  # Draw total number of detected objects
    cv2.imshow('YOLO detection results', frame)  # Display image

    # If inferencing on individual images, wait for user keypress before moving to next image. Otherwise, wait 5ms before moving to next frame.
    key = cv2.waitKey(5)
    if key == ord('q') or key == ord('Q'):  # Press 'q' to quit
        break
    elif key == ord('s') or key == ord('S'):  # Press 's' to pause inference
        cv2.waitKey()
    elif key == ord('p') or key == ord('P'):  # Press 'p' to save a picture of results on this frame
        cv2.imwrite('capture.png', frame)

    # Calculate FPS for this frame
    t_stop = time.perf_counter()
    frame_rate_calc = float(1 / (t_stop - t_start))

    # Append FPS result to frame_rate_buffer (for finding average FPS over multiple frames)
    if len(frame_rate_buffer) >= fps_avg_len:
        frame_rate_buffer.pop(0)
        frame_rate_buffer.append(frame_rate_calc)
    else:
        frame_rate_buffer.append(frame_rate_calc)

    # Calculate average FPS for past frames
    avg_frame_rate = np.mean(frame_rate_buffer)

# Clean up
print(f'Average pipeline FPS: {avg_frame_rate:.2f}')
cap.release()
cv2.destroyAllWindows()
I also swapped the model to yolo11n.pt and ran it on a Raspberry Pi 4, but it only managed 0.9 FPS. It makes me want a Raspberry Pi 5 or the Raspberry Pi AI HAT+.
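For a rough sanity check of that number without the camera or drawing code, a standalone sketch like this can time the model alone (the synthetic 640x480 frame and 20 iterations are arbitrary choices, and imgsz=320 is just one example of trading accuracy for speed):

import time

import numpy as np
from ultralytics import YOLO

# Time the nano model on a synthetic frame; the result is only a rough indication.
model = YOLO('yolo11n.pt', task='detect')
frame = np.zeros((480, 640, 3), dtype=np.uint8)

n = 20
t0 = time.perf_counter()
for _ in range(n):
    model(frame, imgsz=320, verbose=False)  # imgsz=320 shrinks the input to speed up inference
print(f'{n / (time.perf_counter() - t0):.2f} FPS')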